devflow-engine 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (393)
  1. devflow_engine/__init__.py +3 -0
  2. devflow_engine/agentic_prompts.py +100 -0
  3. devflow_engine/agentic_runtime.py +398 -0
  4. devflow_engine/api_key_flow_harness.py +539 -0
  5. devflow_engine/api_keys.py +357 -0
  6. devflow_engine/bootstrap/__init__.py +2 -0
  7. devflow_engine/bootstrap/provision_from_template.py +84 -0
  8. devflow_engine/cli/__init__.py +0 -0
  9. devflow_engine/cli/app.py +7270 -0
  10. devflow_engine/core/__init__.py +0 -0
  11. devflow_engine/core/config.py +86 -0
  12. devflow_engine/core/logging.py +29 -0
  13. devflow_engine/core/paths.py +45 -0
  14. devflow_engine/core/toml_kv.py +33 -0
  15. devflow_engine/devflow_event_worker.py +1292 -0
  16. devflow_engine/devflow_state.py +201 -0
  17. devflow_engine/devin2/__init__.py +9 -0
  18. devflow_engine/devin2/agent_definition.py +120 -0
  19. devflow_engine/devin2/pi_runner.py +204 -0
  20. devflow_engine/devin_orchestration.py +69 -0
  21. devflow_engine/docs/prompts/anti-patterns.md +42 -0
  22. devflow_engine/docs/prompts/devin-agent-prompt.md +55 -0
  23. devflow_engine/docs/prompts/devin2-agent-prompt.md +81 -0
  24. devflow_engine/docs/prompts/examples/devin-vapi-clone-reference-exchange.json +85 -0
  25. devflow_engine/doctor/__init__.py +2 -0
  26. devflow_engine/doctor/triage.py +140 -0
  27. devflow_engine/error/__init__.py +0 -0
  28. devflow_engine/error/remediation.py +21 -0
  29. devflow_engine/errors/error_solver_dag.py +522 -0
  30. devflow_engine/errors/runtime_observability.py +67 -0
  31. devflow_engine/idea/__init__.py +4 -0
  32. devflow_engine/idea/actors.py +481 -0
  33. devflow_engine/idea/agentic.py +465 -0
  34. devflow_engine/idea/analyze.py +93 -0
  35. devflow_engine/idea/devin_chat_dag.py +1 -0
  36. devflow_engine/idea/diff.py +99 -0
  37. devflow_engine/idea/drafts.py +446 -0
  38. devflow_engine/idea/idea_creation_dag.py +643 -0
  39. devflow_engine/idea/ideation_enrichment.py +355 -0
  40. devflow_engine/idea/ideation_enrichment_worker.py +19 -0
  41. devflow_engine/idea/paths.py +28 -0
  42. devflow_engine/idea/promote.py +53 -0
  43. devflow_engine/idea/redaction.py +27 -0
  44. devflow_engine/idea/repo_tools.py +1277 -0
  45. devflow_engine/idea/response_mode.py +30 -0
  46. devflow_engine/idea/story_pipeline.py +1585 -0
  47. devflow_engine/idea/sufficiency.py +376 -0
  48. devflow_engine/idea/traditional_stories.py +1257 -0
  49. devflow_engine/implementation/__init__.py +0 -0
  50. devflow_engine/implementation/alembic_preflight.py +700 -0
  51. devflow_engine/implementation/dag.py +8450 -0
  52. devflow_engine/implementation/green_gate.py +93 -0
  53. devflow_engine/implementation/prompts.py +108 -0
  54. devflow_engine/implementation/test_runtime.py +623 -0
  55. devflow_engine/integration/__init__.py +19 -0
  56. devflow_engine/integration/agentic.py +66 -0
  57. devflow_engine/integration/dag.py +3539 -0
  58. devflow_engine/integration/prompts.py +114 -0
  59. devflow_engine/integration/supabase_schema.sql +31 -0
  60. devflow_engine/integration/supabase_sync.py +177 -0
  61. devflow_engine/llm/__init__.py +1 -0
  62. devflow_engine/llm/cli_one_shot.py +84 -0
  63. devflow_engine/llm/cli_stream.py +371 -0
  64. devflow_engine/llm/execution_context.py +26 -0
  65. devflow_engine/llm/invoke.py +1322 -0
  66. devflow_engine/llm/provider_api.py +304 -0
  67. devflow_engine/llm/repo_knowledge.py +588 -0
  68. devflow_engine/llm_primitives.py +315 -0
  69. devflow_engine/orchestration.py +62 -0
  70. devflow_engine/planning/__init__.py +0 -0
  71. devflow_engine/planning/analyze_repo.py +92 -0
  72. devflow_engine/planning/render_drafts.py +133 -0
  73. devflow_engine/playground/__init__.py +0 -0
  74. devflow_engine/playground/hooks.py +26 -0
  75. devflow_engine/playwright_workflow/__init__.py +5 -0
  76. devflow_engine/playwright_workflow/dag.py +1317 -0
  77. devflow_engine/process/__init__.py +5 -0
  78. devflow_engine/process/dag.py +59 -0
  79. devflow_engine/project_registration/__init__.py +3 -0
  80. devflow_engine/project_registration/dag.py +1581 -0
  81. devflow_engine/project_registry.py +109 -0
  82. devflow_engine/prompts/devin/generic/prompt.md +6 -0
  83. devflow_engine/prompts/devin/ideation/prompt.md +263 -0
  84. devflow_engine/prompts/devin/ideation/scenarios.md +5 -0
  85. devflow_engine/prompts/devin/ideation_loop/prompt.md +6 -0
  86. devflow_engine/prompts/devin/insight/prompt.md +11 -0
  87. devflow_engine/prompts/devin/insight/scenarios.md +5 -0
  88. devflow_engine/prompts/devin/intake/prompt.md +15 -0
  89. devflow_engine/prompts/devin/iterate/prompt.md +12 -0
  90. devflow_engine/prompts/devin/shared/eval_doctrine.md +9 -0
  91. devflow_engine/prompts/devin/shared/principles.md +246 -0
  92. devflow_engine/prompts/devin_eval/assessment/prompt.md +18 -0
  93. devflow_engine/prompts/idea/api_ideation_agent/prompt.md +8 -0
  94. devflow_engine/prompts/idea/api_insight_agent/prompt.md +8 -0
  95. devflow_engine/prompts/idea/response_doctrine/prompt.md +18 -0
  96. devflow_engine/prompts/implementation/dependency_assessment/prompt.md +12 -0
  97. devflow_engine/prompts/implementation/green/green/prompt.md +11 -0
  98. devflow_engine/prompts/implementation/green/node_config/prompt.md +3 -0
  99. devflow_engine/prompts/implementation/green_review/outcome_review/prompt.md +5 -0
  100. devflow_engine/prompts/implementation/green_review/prior_run_review/prompt.md +5 -0
  101. devflow_engine/prompts/implementation/red/prompt.md +27 -0
  102. devflow_engine/prompts/implementation/redreview/prompt.md +23 -0
  103. devflow_engine/prompts/implementation/redreview_repair/prompt.md +16 -0
  104. devflow_engine/prompts/implementation/setupdoc/prompt.md +10 -0
  105. devflow_engine/prompts/implementation/story_planning/prompt.md +13 -0
  106. devflow_engine/prompts/implementation/test_design/prompt.md +27 -0
  107. devflow_engine/prompts/integration/README.md +185 -0
  108. devflow_engine/prompts/integration/green/example.md +67 -0
  109. devflow_engine/prompts/integration/green/green/prompt.md +10 -0
  110. devflow_engine/prompts/integration/green/node_config/prompt.md +42 -0
  111. devflow_engine/prompts/integration/green/past_prompts/20260417T212300/green/prompt.md +15 -0
  112. devflow_engine/prompts/integration/green/past_prompts/20260417T212300/node_config/prompt.md +42 -0
  113. devflow_engine/prompts/integration/green_enrich/example.md +79 -0
  114. devflow_engine/prompts/integration/green_enrich/green_enrich/prompt.md +9 -0
  115. devflow_engine/prompts/integration/green_enrich/node_config/prompt.md +41 -0
  116. devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/green_enrich/prompt.md +14 -0
  117. devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/node_config/prompt.md +41 -0
  118. devflow_engine/prompts/integration/red/code_repair/prompt.md +12 -0
  119. devflow_engine/prompts/integration/red/example.md +152 -0
  120. devflow_engine/prompts/integration/red/node_config/prompt.md +86 -0
  121. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
  122. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/node_config/prompt.md +84 -0
  123. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red/prompt.md +16 -0
  124. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red_repair/prompt.md +15 -0
  125. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
  126. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/node_config/prompt.md +84 -0
  127. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/red_repair/prompt.md +11 -0
  128. devflow_engine/prompts/integration/red/red/prompt.md +11 -0
  129. devflow_engine/prompts/integration/red/red_repair/prompt.md +12 -0
  130. devflow_engine/prompts/integration/red_review/example.md +71 -0
  131. devflow_engine/prompts/integration/red_review/node_config/prompt.md +41 -0
  132. devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/node_config/prompt.md +41 -0
  133. devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/red_review/prompt.md +15 -0
  134. devflow_engine/prompts/integration/red_review/red_review/prompt.md +9 -0
  135. devflow_engine/prompts/integration/resolve/example.md +111 -0
  136. devflow_engine/prompts/integration/resolve/node_config/prompt.md +64 -0
  137. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/node_config/prompt.md +64 -0
  138. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_implicated_users/prompt.md +15 -0
  139. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_side_effects/prompt.md +15 -0
  140. devflow_engine/prompts/integration/resolve/resolve_implicated_users/prompt.md +10 -0
  141. devflow_engine/prompts/integration/resolve/resolve_side_effects/prompt.md +10 -0
  142. devflow_engine/prompts/integration/validate/build_idea_acceptance_coverage/prompt.md +12 -0
  143. devflow_engine/prompts/integration/validate/code_repair/prompt.md +13 -0
  144. devflow_engine/prompts/integration/validate/example.md +143 -0
  145. devflow_engine/prompts/integration/validate/node_config/prompt.md +87 -0
  146. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
  147. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/node_config/prompt.md +67 -0
  148. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_enrich_gate/prompt.md +17 -0
  149. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_repair/prompt.md +16 -0
  150. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
  151. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/node_config/prompt.md +67 -0
  152. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/validate_repair/prompt.md +9 -0
  153. devflow_engine/prompts/integration/validate/validate_enrich_gate/prompt.md +10 -0
  154. devflow_engine/prompts/integration/validate/validate_repair/prompt.md +20 -0
  155. devflow_engine/prompts/integration/write_workflows/example.md +100 -0
  156. devflow_engine/prompts/integration/write_workflows/node_config/prompt.md +44 -0
  157. devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/node_config/prompt.md +44 -0
  158. devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/write_workflows/prompt.md +17 -0
  159. devflow_engine/prompts/integration/write_workflows/write_workflows/prompt.md +11 -0
  160. devflow_engine/prompts/iterate/README.md +7 -0
  161. devflow_engine/prompts/iterate/coder/prompt.md +11 -0
  162. devflow_engine/prompts/iterate/framer/prompt.md +11 -0
  163. devflow_engine/prompts/iterate/iterator/prompt.md +13 -0
  164. devflow_engine/prompts/iterate/observer/prompt.md +11 -0
  165. devflow_engine/prompts/recovery/diagnosis/prompt.md +7 -0
  166. devflow_engine/prompts/recovery/execution/prompt.md +8 -0
  167. devflow_engine/prompts/recovery/execution_verification/prompt.md +7 -0
  168. devflow_engine/prompts/recovery/failure_investigation/prompt.md +10 -0
  169. devflow_engine/prompts/recovery/preflight_health_repo_repair/prompt.md +8 -0
  170. devflow_engine/prompts/recovery/remediation_execution/prompt.md +11 -0
  171. devflow_engine/prompts/recovery/root_cause_investigation/prompt.md +12 -0
  172. devflow_engine/prompts/scope_idea/doctrine/prompt.md +7 -0
  173. devflow_engine/prompts/source_doc_eval/document/prompt.md +6 -0
  174. devflow_engine/prompts/source_doc_eval/targeted_mutation/prompt.md +9 -0
  175. devflow_engine/prompts/source_doc_mutation/domain_entities/prompt.md +6 -0
  176. devflow_engine/prompts/source_doc_mutation/product_brief/prompt.md +6 -0
  177. devflow_engine/prompts/source_doc_mutation/project_doc_coherence/prompt.md +7 -0
  178. devflow_engine/prompts/source_doc_mutation/project_doc_render/prompt.md +9 -0
  179. devflow_engine/prompts/source_doc_mutation/source_doc_coherence/prompt.md +5 -0
  180. devflow_engine/prompts/source_doc_mutation/source_doc_enrichment_coherence/prompt.md +6 -0
  181. devflow_engine/prompts/source_doc_mutation/user_workflows/prompt.md +6 -0
  182. devflow_engine/prompts/source_scope/doctrine/prompt.md +10 -0
  183. devflow_engine/prompts/ui_grounding/doctrine/prompt.md +7 -0
  184. devflow_engine/recovery/__init__.py +3 -0
  185. devflow_engine/recovery/dag.py +2609 -0
  186. devflow_engine/recovery/models.py +220 -0
  187. devflow_engine/refactor.py +93 -0
  188. devflow_engine/registry/__init__.py +1 -0
  189. devflow_engine/registry/cards.py +238 -0
  190. devflow_engine/registry/domain_normalize.py +60 -0
  191. devflow_engine/registry/effects.py +65 -0
  192. devflow_engine/registry/enforce_report.py +150 -0
  193. devflow_engine/registry/module_cards_classify.py +164 -0
  194. devflow_engine/registry/module_cards_draft.py +184 -0
  195. devflow_engine/registry/module_cards_gate.py +59 -0
  196. devflow_engine/registry/packages.py +347 -0
  197. devflow_engine/registry/pathways.py +323 -0
  198. devflow_engine/review/__init__.py +11 -0
  199. devflow_engine/review/dag.py +588 -0
  200. devflow_engine/review/review_story.py +67 -0
  201. devflow_engine/scope_idea/__init__.py +3 -0
  202. devflow_engine/scope_idea/agentic.py +39 -0
  203. devflow_engine/scope_idea/dag.py +1069 -0
  204. devflow_engine/scope_idea/models.py +175 -0
  205. devflow_engine/skills/builtins/devflow/queue_failure_investigation/SKILL.md +112 -0
  206. devflow_engine/skills/builtins/devflow/queue_idea_to_story/SKILL.md +120 -0
  207. devflow_engine/skills/builtins/devflow/queue_integration/SKILL.md +105 -0
  208. devflow_engine/skills/builtins/devflow/queue_recovery/SKILL.md +108 -0
  209. devflow_engine/skills/builtins/devflow/queue_runtime_core/SKILL.md +155 -0
  210. devflow_engine/skills/builtins/devflow/queue_story_implementation/SKILL.md +122 -0
  211. devflow_engine/skills/builtins/devin/idea_to_story_handoff/SKILL.md +120 -0
  212. devflow_engine/skills/builtins/devin/ideation/SKILL.md +168 -0
  213. devflow_engine/skills/builtins/devin/ideation/state-and-phrasing-reference.md +18 -0
  214. devflow_engine/skills/builtins/devin/insight/SKILL.md +22 -0
  215. devflow_engine/skills/registry.example.yaml +42 -0
  216. devflow_engine/source_doc_assumptions.py +291 -0
  217. devflow_engine/source_doc_mutation_dag.py +1606 -0
  218. devflow_engine/source_doc_mutation_eval.py +417 -0
  219. devflow_engine/source_doc_mutation_worker.py +25 -0
  220. devflow_engine/source_docs_schema.py +207 -0
  221. devflow_engine/source_docs_updater.py +309 -0
  222. devflow_engine/source_scope/__init__.py +15 -0
  223. devflow_engine/source_scope/agentic.py +45 -0
  224. devflow_engine/source_scope/dag.py +1626 -0
  225. devflow_engine/source_scope/models.py +177 -0
  226. devflow_engine/stores/__init__.py +0 -0
  227. devflow_engine/stores/execution_store.py +3534 -0
  228. devflow_engine/story/__init__.py +0 -0
  229. devflow_engine/story/contracts.py +160 -0
  230. devflow_engine/story/discovery.py +47 -0
  231. devflow_engine/story/evidence.py +118 -0
  232. devflow_engine/story/hashing.py +27 -0
  233. devflow_engine/story/implemented_queue_purge.py +148 -0
  234. devflow_engine/story/indexer.py +105 -0
  235. devflow_engine/story/io.py +20 -0
  236. devflow_engine/story/markdown_contracts.py +298 -0
  237. devflow_engine/story/reconciliation.py +408 -0
  238. devflow_engine/story/validate_stories.py +149 -0
  239. devflow_engine/story/validate_tests_story.py +512 -0
  240. devflow_engine/story/validation.py +133 -0
  241. devflow_engine/ui_grounding/__init__.py +11 -0
  242. devflow_engine/ui_grounding/agentic.py +31 -0
  243. devflow_engine/ui_grounding/dag.py +874 -0
  244. devflow_engine/ui_grounding/models.py +224 -0
  245. devflow_engine/ui_grounding/pencil_bridge.py +247 -0
  246. devflow_engine/vendor/__init__.py +0 -0
  247. devflow_engine/vendor/datalumina_genai/__init__.py +11 -0
  248. devflow_engine/vendor/datalumina_genai/core/__init__.py +0 -0
  249. devflow_engine/vendor/datalumina_genai/core/exceptions.py +9 -0
  250. devflow_engine/vendor/datalumina_genai/core/nodes/__init__.py +0 -0
  251. devflow_engine/vendor/datalumina_genai/core/nodes/agent.py +48 -0
  252. devflow_engine/vendor/datalumina_genai/core/nodes/agent_streaming_node.py +26 -0
  253. devflow_engine/vendor/datalumina_genai/core/nodes/base.py +89 -0
  254. devflow_engine/vendor/datalumina_genai/core/nodes/concurrent.py +30 -0
  255. devflow_engine/vendor/datalumina_genai/core/nodes/router.py +69 -0
  256. devflow_engine/vendor/datalumina_genai/core/schema.py +72 -0
  257. devflow_engine/vendor/datalumina_genai/core/task.py +52 -0
  258. devflow_engine/vendor/datalumina_genai/core/validate.py +139 -0
  259. devflow_engine/vendor/datalumina_genai/core/workflow.py +200 -0
  260. devflow_engine/worker.py +1086 -0
  261. devflow_engine/worker_guard.py +233 -0
  262. devflow_engine-1.0.0.dist-info/METADATA +235 -0
  263. devflow_engine-1.0.0.dist-info/RECORD +393 -0
  264. devflow_engine-1.0.0.dist-info/WHEEL +4 -0
  265. devflow_engine-1.0.0.dist-info/entry_points.txt +3 -0
  266. devin/__init__.py +6 -0
  267. devin/dag.py +58 -0
  268. devin/dag_two_arm.py +138 -0
  269. devin/devin_chat_scenario_catalog.json +588 -0
  270. devin/devin_eval.py +677 -0
  271. devin/nodes/__init__.py +0 -0
  272. devin/nodes/ideation/__init__.py +0 -0
  273. devin/nodes/ideation/node.py +195 -0
  274. devin/nodes/ideation/playground.py +267 -0
  275. devin/nodes/ideation/prompt.md +65 -0
  276. devin/nodes/ideation/scenarios/continue_refinement.py +13 -0
  277. devin/nodes/ideation/scenarios/continue_refinement_evals.py +18 -0
  278. devin/nodes/ideation/scenarios/idea_fits_existing_patterns.py +17 -0
  279. devin/nodes/ideation/scenarios/idea_fits_existing_patterns_evals.py +16 -0
  280. devin/nodes/ideation/scenarios/large_idea_split.py +4 -0
  281. devin/nodes/ideation/scenarios/large_idea_split_evals.py +17 -0
  282. devin/nodes/ideation/scenarios/source_documentation_added.py +4 -0
  283. devin/nodes/ideation/scenarios/source_documentation_added_evals.py +16 -0
  284. devin/nodes/ideation/scenarios/user_says_create_it.py +30 -0
  285. devin/nodes/ideation/scenarios/user_says_create_it_evals.py +23 -0
  286. devin/nodes/ideation/scenarios/vague_idea.py +16 -0
  287. devin/nodes/ideation/scenarios/vague_idea_evals.py +47 -0
  288. devin/nodes/ideation/tools.json +312 -0
  289. devin/nodes/insight/__init__.py +0 -0
  290. devin/nodes/insight/node.py +49 -0
  291. devin/nodes/insight/playground.py +154 -0
  292. devin/nodes/insight/prompt.md +61 -0
  293. devin/nodes/insight/scenarios/architecture_pattern_query.py +15 -0
  294. devin/nodes/insight/scenarios/architecture_pattern_query_evals.py +25 -0
  295. devin/nodes/insight/scenarios/codebase_exploration.py +15 -0
  296. devin/nodes/insight/scenarios/codebase_exploration_evals.py +23 -0
  297. devin/nodes/insight/scenarios/devin_ideation_routing.py +19 -0
  298. devin/nodes/insight/scenarios/devin_ideation_routing_evals.py +39 -0
  299. devin/nodes/insight/scenarios/devin_insight_routing.py +20 -0
  300. devin/nodes/insight/scenarios/devin_insight_routing_evals.py +40 -0
  301. devin/nodes/insight/scenarios/operational_debugging.py +15 -0
  302. devin/nodes/insight/scenarios/operational_debugging_evals.py +23 -0
  303. devin/nodes/insight/scenarios/operational_question.py +9 -0
  304. devin/nodes/insight/scenarios/operational_question_evals.py +8 -0
  305. devin/nodes/insight/scenarios/queue_status.py +15 -0
  306. devin/nodes/insight/scenarios/queue_status_evals.py +23 -0
  307. devin/nodes/insight/scenarios/source_doc_explanation.py +14 -0
  308. devin/nodes/insight/scenarios/source_doc_explanation_evals.py +21 -0
  309. devin/nodes/insight/scenarios/worker_state_check.py +15 -0
  310. devin/nodes/insight/scenarios/worker_state_check_evals.py +22 -0
  311. devin/nodes/insight/tools.json +126 -0
  312. devin/nodes/intake/__init__.py +0 -0
  313. devin/nodes/intake/node.py +27 -0
  314. devin/nodes/intake/playground.py +47 -0
  315. devin/nodes/intake/prompt.md +12 -0
  316. devin/nodes/intake/scenarios/ideation_routing.py +4 -0
  317. devin/nodes/intake/scenarios/ideation_routing_evals.py +5 -0
  318. devin/nodes/intake/scenarios/insight_routing.py +4 -0
  319. devin/nodes/intake/scenarios/insight_routing_evals.py +5 -0
  320. devin/nodes/iterate/README.md +44 -0
  321. devin/nodes/iterate/__init__.py +1 -0
  322. devin/nodes/iterate/_archived_design_stages/01-objectives-requirements.md +112 -0
  323. devin/nodes/iterate/_archived_design_stages/02-evals.md +131 -0
  324. devin/nodes/iterate/_archived_design_stages/03-tools-and-boundaries.md +110 -0
  325. devin/nodes/iterate/_archived_design_stages/04-harness-and-playground.md +32 -0
  326. devin/nodes/iterate/_archived_design_stages/05-prompt-deferred.md +11 -0
  327. devin/nodes/iterate/_archived_design_stages/coder_agent_design/01-objectives-requirements.md +20 -0
  328. devin/nodes/iterate/_archived_design_stages/coder_agent_design/02-evals.md +8 -0
  329. devin/nodes/iterate/_archived_design_stages/coder_agent_design/03-tools-and-boundaries.md +14 -0
  330. devin/nodes/iterate/_archived_design_stages/coder_agent_design/04-harness-and-playground.md +12 -0
  331. devin/nodes/iterate/_archived_design_stages/framer_agent_design/01-objectives-requirements.md +20 -0
  332. devin/nodes/iterate/_archived_design_stages/framer_agent_design/02-evals.md +8 -0
  333. devin/nodes/iterate/_archived_design_stages/framer_agent_design/03-tools-and-boundaries.md +13 -0
  334. devin/nodes/iterate/_archived_design_stages/framer_agent_design/04-harness-and-playground.md +12 -0
  335. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/01-objectives-requirements.md +25 -0
  336. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/02-evals.md +9 -0
  337. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/03-tools-and-boundaries.md +14 -0
  338. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/04-harness-and-playground.md +12 -0
  339. devin/nodes/iterate/_archived_design_stages/observer_agent_design/01-objectives-requirements.md +20 -0
  340. devin/nodes/iterate/_archived_design_stages/observer_agent_design/02-evals.md +8 -0
  341. devin/nodes/iterate/_archived_design_stages/observer_agent_design/03-tools-and-boundaries.md +14 -0
  342. devin/nodes/iterate/_archived_design_stages/observer_agent_design/04-harness-and-playground.md +13 -0
  343. devin/nodes/iterate/agent-roles.md +89 -0
  344. devin/nodes/iterate/agents/README.md +10 -0
  345. devin/nodes/iterate/artifacts.md +504 -0
  346. devin/nodes/iterate/contract.md +100 -0
  347. devin/nodes/iterate/eval-plan.md +74 -0
  348. devin/nodes/iterate/node.py +100 -0
  349. devin/nodes/iterate/pipeline/README.md +13 -0
  350. devin/nodes/iterate/playground-contract.md +76 -0
  351. devin/nodes/iterate/prompt.md +11 -0
  352. devin/nodes/iterate/scenarios/README.md +38 -0
  353. devin/nodes/iterate/scenarios/artifact-and-loop-scenarios.md +101 -0
  354. devin/nodes/iterate/scenarios/coder_artifact_alignment.py +32 -0
  355. devin/nodes/iterate/scenarios/coder_artifact_alignment_evals.py +45 -0
  356. devin/nodes/iterate/scenarios/coder_bounded_fix.py +27 -0
  357. devin/nodes/iterate/scenarios/coder_bounded_fix_evals.py +45 -0
  358. devin/nodes/iterate/scenarios/devin_iterate_routing.py +21 -0
  359. devin/nodes/iterate/scenarios/devin_iterate_routing_evals.py +36 -0
  360. devin/nodes/iterate/scenarios/framer_scope_boundary.py +25 -0
  361. devin/nodes/iterate/scenarios/framer_scope_boundary_evals.py +57 -0
  362. devin/nodes/iterate/scenarios/framer_task_framing.py +25 -0
  363. devin/nodes/iterate/scenarios/framer_task_framing_evals.py +58 -0
  364. devin/nodes/iterate/scenarios/iterate_error_fix.py +21 -0
  365. devin/nodes/iterate/scenarios/iterate_error_fix_evals.py +39 -0
  366. devin/nodes/iterate/scenarios/iterate_quick_change.py +21 -0
  367. devin/nodes/iterate/scenarios/iterate_quick_change_evals.py +35 -0
  368. devin/nodes/iterate/scenarios/iterate_to_idea_promotion.py +23 -0
  369. devin/nodes/iterate/scenarios/iterate_to_idea_promotion_evals.py +53 -0
  370. devin/nodes/iterate/scenarios/iterate_to_insight_reroute.py +23 -0
  371. devin/nodes/iterate/scenarios/iterate_to_insight_reroute_evals.py +53 -0
  372. devin/nodes/iterate/scenarios/observer_evidence_seam.py +28 -0
  373. devin/nodes/iterate/scenarios/observer_evidence_seam_evals.py +55 -0
  374. devin/nodes/iterate/scenarios/observer_repro_creation.py +28 -0
  375. devin/nodes/iterate/scenarios/observer_repro_creation_evals.py +45 -0
  376. devin/nodes/iterate/scenarios/routing-matrix.md +45 -0
  377. devin/nodes/shared/__init__.py +0 -0
  378. devin/nodes/shared/filemaker_expert.md +80 -0
  379. devin/nodes/shared/filemaker_expert.py +354 -0
  380. devin/nodes/shared/filemaker_expert_eval/runner.py +176 -0
  381. devin/nodes/shared/filemaker_expert_eval/scenarios.json +65 -0
  382. devin/nodes/shared/goldilocks_advisor_eval/runner.py +214 -0
  383. devin/nodes/shared/goldilocks_advisor_eval/scenarios.json +58 -0
  384. devin/nodes/shared/helpers.py +156 -0
  385. devin/nodes/shared/idea_compliance_advisor_eval/runner.py +252 -0
  386. devin/nodes/shared/idea_compliance_advisor_eval/scenarios.json +75 -0
  387. devin/nodes/shared/models.py +44 -0
  388. devin/nodes/shared/post.py +40 -0
  389. devin/nodes/shared/router.py +107 -0
  390. devin/nodes/shared/tools.py +191 -0
  391. devin/shared/devin-chat-rubric.md +237 -0
  392. devin/shared/devin-chat-scenario-suite.md +90 -0
  393. devin/shared/eval_doctrine.md +9 -0
@@ -0,0 +1,1585 @@
1
+ from __future__ import annotations
2
+
3
+ import hashlib
4
+ import json
5
+ import os
6
+ import re
7
+ import subprocess
8
+ import sys
9
+ import uuid
10
+ from dataclasses import dataclass
11
+ from datetime import UTC, datetime
12
+ from pathlib import Path
13
+ from typing import Any
14
+ from urllib.parse import quote
15
+ from urllib.request import Request, urlopen
16
+
17
+ from pydantic import BaseModel
18
+
19
+ from ..devflow_state import publish_devflow_state
20
+ from ..project_registry import find_project_for_repo_root
21
+ from ..planning.render_drafts import render_draft_story_markdown
22
+ from ..stores.execution_store import ExecutionStore
23
+ from ..story.contracts import validate_story_contract
24
+ from ..story.markdown_contracts import MANDATORY_CONTRACT_FORMATION_MODE_LINE, PlaneOracle, StoryContract
25
+ from ..ui_grounding.models import GroundedStoryReferencePatchArtifact
26
+ from ..vendor.datalumina_genai.core.nodes.agent import AgentConfig, AgentNode
27
+ from ..vendor.datalumina_genai.core.nodes.base import Node
28
+ from ..vendor.datalumina_genai.core.schema import NodeConfig, WorkflowSchema
29
+ from ..vendor.datalumina_genai.core.task import TaskContext
30
+ from ..vendor.datalumina_genai.core.workflow import Workflow
31
+ from .actors import load_actor_registry, normalize_idea_actors, resolve_actor_entry, write_actor_registry
32
+ from .paths import get_idea_paths
33
+ from .sufficiency import evaluate_idea_sufficiency, extract_sufficient_idea, load_idea_source, render_sufficiency_json
34
+ from .traditional_stories import (
35
+ TraditionalStoryInsufficiencyError,
36
+ _extract_json,
37
+ _load_llm_cli_config,
38
+ generate_story_coverage_requirements,
39
+ generate_traditional_user_story_set,
40
+ validate_sufficient_idea,
41
+ )
42
+ from ..llm.cli_one_shot import run_one_shot
43
+
44
+
45
# Stable identifier for this DAG; used to namespace per-run pipeline directories
# (see _pipeline_root) and to label runs in the execution store.
DAG_ID = "idea_to_devflow_stories_dag"
46
+
47
+
48
@dataclass(frozen=True)
class IdeaStoryDagResult:
    """Immutable outcome of one idea-to-stories DAG run."""

    exit_code: int  # process-style status; presumably 0 on success — confirm against callers
    run_id: str  # identifier of this pipeline run
    pipeline_dir: Path  # on-disk root where this run's artifacts live
    message: str  # human-readable summary of the outcome
    story_set_id: str | None  # generated story-set id, when available
    devflow_story_set_id: str | None  # devflow-side story-set id, when available
    failed_stage: str | None = None  # name of the stage that failed, if any
    resume_cursor: dict[str, Any] | None = None  # opaque resume state; NOTE(review): shape not visible here
58
+
59
+
60
class IdeaStoryDagEvent(BaseModel):
    """Input event that triggers the idea-to-stories DAG."""

    repo_root: str  # path to the project repository root
    idea_id: str  # identifier of the idea to expand into stories
    raw_text: str | None = None  # inline idea text, when provided directly
    source_path: str | None = None  # path to a file containing the idea text, when provided
    max_stories: int = 0  # story cap; presumably 0 means "no cap" — TODO confirm
    planes: list[str]  # requested story planes; see _validate_candidate_planes for the allowed set
    pipeline_key: str  # key namespacing this run's pipeline directory
68
+
69
+
70
+ _CURRENT_STORE: ExecutionStore | None = None
71
+ _CURRENT_RUN_ID: str | None = None
72
+
73
+
74
+ def _store_run() -> tuple[ExecutionStore, str]:
75
+ if _CURRENT_STORE is None or _CURRENT_RUN_ID is None:
76
+ raise RuntimeError("idea story pipeline missing runtime store/run_id")
77
+ return _CURRENT_STORE, _CURRENT_RUN_ID
78
+
79
+
80
+ def _stable_hash(payload: Any) -> str:
81
+ return hashlib.sha256(json.dumps(payload, sort_keys=True).encode("utf-8")).hexdigest()
82
+
83
+
84
def _stable_id(prefix: str, payload: Any, *, size: int = 12) -> str:
    """Deterministic short identifier: *prefix* followed by the first *size* hex chars of the payload hash."""
    digest = _stable_hash(payload)
    return prefix + digest[:size]
86
+
87
+
88
+ def _deterministic_uuid4(seed: str) -> str:
89
+ raw = bytearray(hashlib.sha256(seed.encode("utf-8")).digest()[:16])
90
+ raw[6] = (raw[6] & 0x0F) | 0x40
91
+ raw[8] = (raw[8] & 0x3F) | 0x80
92
+ return str(uuid.UUID(bytes=bytes(raw)))
93
+
94
+
95
+ def _parse_list_option(planes: list[str]) -> list[str]:
96
+ return sorted({str(item).strip() for item in planes if str(item).strip()})
97
+
98
+
99
+ CANONICAL_PLANES = ["auth", "api", "ui", "integrations", "ops"]
100
+
101
+
102
+ def _validate_candidate_planes(planes: list[str]) -> list[str]:
103
+ allowed = set(CANONICAL_PLANES)
104
+ invalid = [plane for plane in planes if plane not in allowed]
105
+ if invalid:
106
+ raise ValueError(f"Invalid planes: {invalid}. Allowed planes are auth, api, ui, integrations, ops.")
107
+ return planes
108
+
109
+
110
def _resolve_story_plane_candidates(planes: list[str]) -> list[str]:
    """Normalize and validate the requested planes; default to every canonical plane when none remain."""
    candidates = _validate_candidate_planes(_parse_list_option(planes))
    if candidates:
        return candidates
    return list(CANONICAL_PLANES)
113
+
114
+
115
def _pipeline_root(repo_root: Path, *, idea_id: str, pipeline_key: str) -> Path:
    """Directory holding this DAG run's artifacts under the idea's pipelines tree."""
    idea_dir = get_idea_paths(repo_root, idea_id=idea_id).idea_dir
    return idea_dir / "pipelines" / DAG_ID / pipeline_key
117
+
118
+
119
def _read_idea_payload(repo_root: Path, *, idea_id: str) -> dict[str, Any]:
    """Load the idea's ``idea.json``; fall back to a minimal stub when the file is absent."""
    idea_json = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "idea.json"
    if idea_json.exists():
        return json.loads(idea_json.read_text(encoding="utf-8"))
    return {"idea_id": idea_id}
124
+
125
+
126
+ def _write_json(path: Path, payload: dict[str, Any]) -> None:
127
+ path.parent.mkdir(parents=True, exist_ok=True)
128
+ path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8")
129
+
130
+
131
+ def _clean_dfs_summary(text: str) -> str:
132
+ cleaned = re.sub(r"\[<more repeated useless text>\]", "", str(text or ""), flags=re.IGNORECASE)
133
+ cleaned = re.sub(r"\s+", " ", cleaned).strip()
134
+ return cleaned[:240] if cleaned else "Processing story generation"
135
+
136
+
137
def _dfs_running(*, repo_root: Path, idea_id: str, run_id: str, summary: str) -> None:
    """Publish a running/processing devflow-state update for this idea run.

    Best-effort: resolves the project id from the repo registry first, then
    from the idea's own ``idea.json``; publishes nothing when neither source
    yields a project id.
    """
    project = find_project_for_repo_root(repo_root)
    project_id = str(project.get("project_id") or "").strip() if project is not None else None
    if not project_id:
        # Fall back to the project_id recorded in the idea's metadata file.
        idea_json = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "idea.json"
        if idea_json.exists():
            try:
                payload = json.loads(idea_json.read_text(encoding="utf-8"))
                project_id = str(payload.get("project_id") or "").strip()
            except Exception:
                project_id = None  # unreadable metadata -> treat as unresolved
    if not project_id:
        return
    publish_devflow_state(
        project_id=project_id,
        run_id=run_id,
        current_state="running",
        current_status="processing",
        run_summary=_clean_dfs_summary(summary),
        display="project",
        display_path=f"idea:{idea_id}",
    )
158
+
159
+
160
def _dfs_node_running(*, repo_root: Path, idea_id: str, run_id: str, node_id: str, summary: str) -> None:
    """Per-node variant of _dfs_running.

    NOTE(review): ``node_id`` is accepted but not published — confirm
    whether node-level granularity was intended.
    """
    _dfs_running(repo_root=repo_root, idea_id=idea_id, run_id=run_id, summary=summary)
162
+
163
+
164
def _dfs_terminal(*, repo_root: Path, idea_id: str, run_id: str, summary: str, current_state: str, current_status: str, error_message: str | None = None) -> None:
    """Publish a terminal DevFlow state update for this idea (best effort).

    Project-id resolution mirrors _dfs_running: repo mapping first, then
    idea.json; silently no-ops when neither yields an id.
    """
    mapping = find_project_for_repo_root(repo_root)
    resolved = "" if mapping is None else str(mapping.get("project_id") or "").strip()
    if not resolved:
        idea_json = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "idea.json"
        if idea_json.exists():
            try:
                data = json.loads(idea_json.read_text(encoding="utf-8"))
                resolved = str(data.get("project_id") or "").strip()
            except Exception:
                resolved = ""
    if not resolved:
        return
    cleaned_error = _clean_dfs_summary(error_message or "") if error_message else None
    publish_devflow_state(
        project_id=resolved,
        run_id=run_id,
        current_state=current_state,
        current_status=current_status,
        run_summary=_clean_dfs_summary(summary),
        error_message=cleaned_error,
        display="project",
        display_path=f"idea:{idea_id}",
    )
186
+
187
+
188
+ def _resolve_supabase_rest_config() -> tuple[str, str] | None:
189
+ if os.environ.get("PYTEST_CURRENT_TEST"):
190
+ return None
191
+ url = (
192
+ os.environ.get("DEVFLOW_SUPABASE_URL")
193
+ or os.environ.get("SUPABASE_URL")
194
+ or os.environ.get("SUPABASE_URL")
195
+ )
196
+ key = (
197
+ os.environ.get("DEVFLOW_SUPABASE_SERVICE_KEY")
198
+ or os.environ.get("SUPABASE_SERVICE_ROLE_KEY")
199
+ or os.environ.get("SUPABASE_SERVICE_KEY")
200
+ )
201
+ if not url or not key:
202
+ from ..devflow_state import _keychain_get # type: ignore
203
+ url = url or _keychain_get("Supabase URL", "Clarity")
204
+ key = key or _keychain_get("Supabase Service Key", "Clarity")
205
+ if not url or not key:
206
+ return None
207
+ return url.rstrip("/"), key
208
+
209
+
210
def _postgrest_request(*, method: str, url: str, key: str, body: Any | None = None, prefer: str | None = None) -> Any:
    """Issue an authenticated PostgREST call and decode the JSON response.

    Returns the parsed JSON body, or None when the response body is empty.
    Raises whatever urlopen raises on HTTP/network errors.
    """
    data = json.dumps(body).encode("utf-8") if body is not None else None
    request = Request(url, data=data, method=method)
    headers = {"apikey": key, "Authorization": f"Bearer {key}"}
    if body is not None:
        headers["Content-Type"] = "application/json"
    if prefer:
        headers["Prefer"] = prefer
    for name, value in headers.items():
        request.add_header(name, value)
    with urlopen(request, timeout=30) as response:
        raw = response.read().decode("utf-8")
    return json.loads(raw) if raw else None
222
+
223
+
224
def _update_idea_pipeline_status(*, repo_root: Path, idea_id: str, status: str) -> None:
    """Update devflow_project_ideas.status for the given idea.

    NOTE(review): ``repo_root`` is currently unused here — confirm intent.
    """
    config = _resolve_supabase_rest_config()
    if config is None:
        return
    base_url, service_key = config
    patch_url = f"{base_url}/rest/v1/devflow_project_ideas?idea_id=eq.{quote(idea_id)}"
    patch_body = {"status": status, "updated_at": datetime.now(UTC).isoformat()}
    try:
        _postgrest_request(method="PATCH", url=patch_url, key=service_key, body=patch_body)
    except Exception:
        # Status sync is best-effort; never fail the pipeline over it.
        return
239
+
240
+
241
+ def _is_uuid_like(value: str | None) -> bool:
242
+ candidate = str(value or "").strip()
243
+ if not candidate:
244
+ return False
245
+ try:
246
+ uuid.UUID(candidate)
247
+ except ValueError:
248
+ return False
249
+ return True
250
+
251
+
252
def _lookup_project_id_for_idea(*, url: str, key: str, idea_id: str) -> str | None:
    """Fetch the project_id linked to *idea_id*; None on any failure."""
    query = f"{url}/rest/v1/devflow_project_ideas?select=project_id&idea_id=eq.{quote(idea_id)}&limit=1"
    try:
        rows = _postgrest_request(method="GET", url=query, key=key)
    except Exception:
        return None
    if isinstance(rows, list) and rows:
        candidate = str((rows[0] or {}).get("project_id") or "").strip()
        return candidate or None
    return None
265
+
266
+
267
def _resolve_story_sync_project_id(*, url: str, key: str, idea_id: str, idea_payload: dict[str, Any]) -> str | None:
    """Pick a project id for story sync.

    Prefers a UUID-shaped id from the local payload, then a UUID-shaped id
    from Supabase; otherwise falls back to whichever non-empty value exists.
    """
    local_id = str(idea_payload.get("project_id") or "").strip()
    if _is_uuid_like(local_id):
        return local_id
    remote_id = _lookup_project_id_for_idea(url=url, key=key, idea_id=idea_id)
    if _is_uuid_like(remote_id):
        return remote_id
    return local_id or remote_id or None
275
+
276
+
277
def _sync_devflow_stories_to_supabase(*, repo_root: Path, idea_id: str, run_id: str, devflow_story_set_id: str) -> None:
    """Best-effort mirror of the idea's compiled stories into Supabase.

    Reads the compiled story-set manifest and its source (traditional)
    manifest, pairs traditional and compiled stories positionally, and
    replaces all ``devflow_idea_stories`` rows for the idea. Returns silently
    whenever any prerequisite (REST config, files on disk, project id) is
    missing, and swallows network errors — this sync is non-fatal.
    """
    config = _resolve_supabase_rest_config()
    if config is None:
        return
    url, key = config
    idea_json = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "idea.json"
    if not idea_json.exists():
        return
    idea_payload = json.loads(idea_json.read_text(encoding="utf-8"))
    project_id = _resolve_story_sync_project_id(url=url, key=key, idea_id=idea_id, idea_payload=idea_payload)
    if not project_id:
        return

    compiled_manifest_path = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "devflow_story_sets" / devflow_story_set_id / "manifest.json"
    if not compiled_manifest_path.exists():
        return
    compiled_manifest = json.loads(compiled_manifest_path.read_text(encoding="utf-8"))
    source_story_set_id = str(compiled_manifest.get("source_story_set_id") or "").strip()
    if not source_story_set_id:
        return
    trad_manifest_path = get_idea_paths(repo_root, idea_id=idea_id).idea_dir / "traditional_user_stories" / source_story_set_id / "manifest.json"
    if not trad_manifest_path.exists():
        return
    trad_manifest = json.loads(trad_manifest_path.read_text(encoding="utf-8"))
    trad_story_paths = [repo_root / rel for rel in (trad_manifest.get("story_paths") or [])]
    compiled_story_paths = [repo_root / rel for rel in (compiled_manifest.get("story_paths") or [])]
    # Pairing is positional: both lists are sorted and matched 1-based by
    # index. NOTE(review): this assumes both manifests list the same number
    # of stories in the same sorted order — confirm upstream guarantees.
    compiled_by_index = {idx: path for idx, path in enumerate(sorted(compiled_story_paths), start=1)}

    rows: list[dict[str, Any]] = []
    for index, trad_path in enumerate(sorted(trad_story_paths), start=1):
        if not trad_path.exists():
            continue
        trad_text = trad_path.read_text(encoding="utf-8")
        title, _actor, acceptance, statement = _story_statement_from_traditional(trad_text)
        compiled_path = compiled_by_index.get(index)
        compiled_payload: dict[str, Any] = {}
        if compiled_path and compiled_path.exists():
            compiled_payload = json.loads(compiled_path.read_text(encoding="utf-8"))
        rows.append({
            "idea_id": idea_id,
            # Deterministic fallback id when no compiled twin exists.
            "story_id": str(compiled_payload.get("story_id") or f"TRAD:{idea_id}:{index:03d}"),
            "story_uuid": str(compiled_payload.get("story_uuid") or "") or None,
            "project_id": project_id,
            "run_id": run_id,
            "title": title,
            "summary": statement or None,
            "acceptance_criteria": acceptance,
            "status": "ready_for_implementation",
            "plane": str(compiled_payload.get("plane") or "") or None,
            "required_planes": compiled_payload.get("required_planes") or [],
            "devflow_story_set_id": devflow_story_set_id,
            "source_story_set_id": source_story_set_id,
            "artifact_path": str(trad_path),
            "compiled_story_id": str(compiled_payload.get("story_id") or "") or None,
            "compiled_story_path": str(compiled_path) if compiled_path else None,
            "updated_at": datetime.now(UTC).isoformat(),
        })
    # Replace-all semantics: delete existing rows for the idea, then upsert
    # the fresh rows keyed on story_id.
    delete_url = f"{url}/rest/v1/devflow_idea_stories?idea_id=eq.{quote(idea_id)}"
    try:
        _postgrest_request(method="DELETE", url=delete_url, key=key)
        if rows:
            _postgrest_request(method="POST", url=f"{url}/rest/v1/devflow_idea_stories?on_conflict=story_id", key=key, body=rows, prefer="resolution=merge-duplicates")
    except Exception:
        return  # non-fatal
341
+
342
+
343
+ def _load_json_report(path: Path) -> dict[str, Any]:
344
+ if not path.exists():
345
+ return {}
346
+ return json.loads(path.read_text(encoding="utf-8"))
347
+
348
+
349
+ def _story_statement_from_traditional(text: str) -> tuple[str, str, list[str], str]:
350
+ lines = [line.rstrip() for line in text.splitlines()]
351
+ title = ""
352
+ statement_lines: list[str] = []
353
+ actor = ""
354
+ acceptance: list[str] = []
355
+ in_acceptance = False
356
+
357
+ for line in lines:
358
+ stripped = line.strip()
359
+ if stripped.startswith("**Title:**"):
360
+ title = stripped.split(":", 1)[1].strip()
361
+ continue
362
+
363
+ if stripped in {"## User Value", "## Acceptance Criteria"}:
364
+ in_acceptance = stripped == "## Acceptance Criteria"
365
+ continue
366
+
367
+ if stripped.startswith("As a "):
368
+ actor = stripped[len("As a "):].rstrip(",").strip()
369
+ statement_lines.append(stripped)
370
+ continue
371
+
372
+ if stripped.startswith("I want ") or stripped.startswith("so that "):
373
+ statement_lines.append(stripped)
374
+ continue
375
+
376
+ if in_acceptance and stripped:
377
+ acceptance.append(stripped)
378
+
379
+ return title or "Compiled DevFlow story", actor, acceptance, "\n".join(statement_lines).strip()
380
+
381
+
382
+ _HTTP_ANCHOR_RE = re.compile(r"^(GET|POST|PUT|PATCH|DELETE)\s+/\S+")
383
+ _SCHEME_ANCHOR_RE = re.compile(r"^[a-z][a-z0-9_-]{1,32}:")
384
+
385
+
386
+ def _anchor_is_allowed(anchor: str) -> bool:
387
+ value = (anchor or "").strip()
388
+ if not value:
389
+ return False
390
+ return bool(_SCHEME_ANCHOR_RE.match(value) or _HTTP_ANCHOR_RE.match(value))
391
+
392
+
393
def _adjudicate_story_planes(*, repo_root: Path, idea_id: str, story: dict[str, Any], candidate_planes: list[str]) -> dict[str, Any]:
    """Ask the LLM CLI which planes a story truly requires.

    Returns the parsed adjudication object with ``required_planes``
    normalized (deduped, sorted). Raises RuntimeError when the CLI fails,
    its output is not a JSON object, planes are invalid, or none are
    returned.
    """
    prompt = {
        "task": "adjudicate_story_planes",
        "idea_id": idea_id,
        "story": story,
        "candidate_planes": candidate_planes,
        "instructions": [
            "Return JSON only. No markdown. No prose outside JSON.",
            "Select only the planes that are truly required for this specific story.",
            "Do not include a plane unless the story's user-visible outcome requires coverage/oracles in that plane.",
            "Do not default to all planes.",
            "For every included and excluded plane, provide a concise rationale.",
        ],
        "output_schema": {
            "required_planes": ["api"],
            "included_planes": [{"plane": "api", "rationale": "string"}],
            "excluded_planes": [{"plane": "ui", "rationale": "string"}],
        },
    }
    base_cmd, delivery = _load_llm_cli_config()
    result = run_one_shot(base_cmd=base_cmd, delivery=delivery, prompt=json.dumps(prompt, indent=2, sort_keys=True), cwd=repo_root)
    if not result.ok:
        raise RuntimeError(result.stderr or result.stdout or "story plane adjudication LLM command failed")
    raw_json = _extract_json(result.stdout)
    if raw_json is None:
        raise RuntimeError("Failed to locate JSON in story plane adjudication output.")
    parsed = json.loads(raw_json)
    if not isinstance(parsed, dict):
        raise RuntimeError("Story plane adjudication output must be a JSON object.")
    required_planes = sorted({str(item).strip() for item in (parsed.get("required_planes") or []) if str(item).strip()})
    # Consistency fix: validate against the canonical plane universe instead
    # of a duplicated hard-coded set, matching _validate_candidate_planes.
    allowed = set(CANONICAL_PLANES)
    invalid = [plane for plane in required_planes if plane not in allowed]
    if invalid:
        raise RuntimeError(f"Story plane adjudication returned invalid planes: {invalid}")
    if not required_planes:
        raise RuntimeError("Story plane adjudication returned no required planes.")
    parsed["required_planes"] = required_planes
    return parsed
430
+
431
+
432
def _validate_generated_story_payload(*, payload: dict[str, Any], source_path: Path) -> list[dict[str, str]]:
    """Validate one compiled story payload; returns a list of issue dicts.

    Runs the shared story-contract validator, then additionally enforces the
    stable-address rule on every populated plane-oracle anchor.
    """
    oracles: list[PlaneOracle] = []
    for entry in payload.get("plane_oracles") or []:
        if not isinstance(entry, dict):
            continue
        oracles.append(
            PlaneOracle(
                plane=str(entry.get("plane") or "").strip(),
                oracle=str(entry.get("oracle") or "").strip(),
                anchor=str(entry.get("anchor") or "").strip(),
            )
        )
    mode_line = str(payload.get("contract_formation_mode") or "").strip()
    contract = StoryContract(
        story_uuid=str(payload.get("story_uuid") or "").strip(),
        story_id=str(payload.get("story_id") or "").strip(),
        title=str(payload.get("title") or "").strip(),
        required_planes=[str(item).strip() for item in (payload.get("required_planes") or []) if str(item).strip()],
        contract_formation_mode_line_present=mode_line == MANDATORY_CONTRACT_FORMATION_MODE_LINE,
        plane_oracles=oracles,
        raw_text=json.dumps(payload, sort_keys=True),
        source_path=str(source_path),
    )

    issues: list[dict[str, str]] = []
    for issue in validate_story_contract(contract, start_line=1):
        issues.append({"code": issue.code, "message": issue.message, "path": issue.path})
    for position, oracle in enumerate(oracles):
        if oracle.anchor and not _anchor_is_allowed(oracle.anchor):
            issues.append(
                {
                    "code": "invalid_plane_oracle_anchor_format",
                    "message": "plane_oracles anchor must be a stable address",
                    "path": f"{source_path}:plane_oracles[{position}].anchor",
                }
            )
    return issues
468
+
469
+
470
def _load_grounded_story_reference_patch(
    repo_root: Path,
    *,
    idea_id: str,
    story_paths: list[Path],
) -> GroundedStoryReferencePatchArtifact | None:
    """Find the newest UI-grounding patch for this idea targeting our stories.

    Scans ui_grounding_dag pipeline outputs newest-first and returns the
    first valid patch whose idea matches and whose target refs intersect
    the story files / idea refs (or declare no targets at all).
    """
    idea_dir = get_idea_paths(repo_root, idea_id=idea_id).idea_dir
    pipeline_root = idea_dir / "pipelines" / "ui_grounding_dag"
    if not pipeline_root.exists():
        return None

    # Refs this story set can be addressed by: the story files, the idea
    # marker, and idea.json when present.
    known_refs = {str(path.relative_to(repo_root)) for path in story_paths}
    known_refs.add(f"idea:{idea_id}")
    idea_json = idea_dir / "idea.json"
    if idea_json.exists():
        known_refs.add(str(idea_json.relative_to(repo_root)))

    patch_files = sorted(
        pipeline_root.glob("*/grounded_story_reference_patch.json"),
        key=lambda path: path.stat().st_mtime,
        reverse=True,
    )
    for patch_file in patch_files:
        try:
            artifact = GroundedStoryReferencePatchArtifact.model_validate_json(patch_file.read_text(encoding="utf-8"))
        except Exception:
            # Skip unreadable or schema-invalid artifacts.
            continue
        if artifact.idea_id != idea_id:
            continue
        targets = {str(ref).strip() for ref in artifact.target_refs if str(ref).strip()}
        if targets and targets.isdisjoint(known_refs):
            continue
        return artifact
    return None
500
+
501
+
502
def _merge_grounded_ui_anchors(
    *,
    payload: dict[str, Any],
    ui_patch: GroundedStoryReferencePatchArtifact | None,
) -> dict[str, Any]:
    """Merge grounded UI anchors from *ui_patch* into a story payload.

    Returns the payload unchanged when there is no patch; otherwise returns
    a shallow copy with deduplicated evidence anchors, ui-plane oracles
    re-pointed at the first grounded anchor, and a ``ui_grounding``
    provenance section.
    """
    if ui_patch is None or not ui_patch.attached_ui_anchors:
        return payload

    updated = dict(payload)
    anchors: list[str] = []
    for entry in ui_patch.attached_ui_anchors:
        if isinstance(entry, dict):
            anchor = str(entry.get("anchor") or "").strip()
            if anchor:
                anchors.append(anchor)
    if not anchors:
        return updated

    # Deduplicate while preserving order: existing evidence first, then the
    # grounded UI anchors.
    seen: set[str] = set()
    deduped: list[str] = []
    for candidate in [*(updated.get("evidence_anchors") or []), *anchors]:
        value = str(candidate or "").strip()
        if value and value not in seen:
            seen.add(value)
            deduped.append(value)
    updated["evidence_anchors"] = deduped

    primary_anchor = anchors[0]
    rewritten: list[dict[str, Any]] = []
    for entry in (updated.get("plane_oracles") or []):
        if not isinstance(entry, dict):
            continue
        oracle = dict(entry)
        if str(oracle.get("plane") or "").strip() == "ui":
            oracle["anchor"] = primary_anchor
        rewritten.append(oracle)
    updated["plane_oracles"] = rewritten
    updated["ui_grounding"] = {
        "origin": ui_patch.origin,
        "supporting_evidence_refs": list(ui_patch.supporting_evidence_refs),
        "attached_ui_anchors": list(ui_patch.attached_ui_anchors),
    }
    return updated
544
+
545
+
546
def _grounded_ui_patch_cache_key(ui_patch: GroundedStoryReferencePatchArtifact | None) -> dict[str, Any] | None:
    """Build a stable cache-key payload for a UI-grounding patch.

    Returns None when no patch is present so the absence itself is part of
    the cache key.
    """
    if ui_patch is None:
        return None
    return dict(
        origin=ui_patch.origin,
        target_refs=list(ui_patch.target_refs),
        attached_ui_anchors=list(ui_patch.attached_ui_anchors),
        supporting_evidence_refs=list(ui_patch.supporting_evidence_refs),
    )
555
+
556
+
557
def _compile_devflow_story_set(
    *,
    repo_root: Path,
    idea_id: str,
    story_set_id: str,
    story_paths: list[Path],
    planes: list[str],
    story_plane_adjudication: dict[str, Any] | None = None,
    pass_index: int = 1,
    prior_validation_issues: list[dict[str, str]] | None = None,
    force_rebuild: bool = False,
) -> tuple[str, Path, list[Path]]:
    """Compile traditional user stories into a DevFlow story set.

    Returns ``(devflow_story_set_id, set_root_dir, compiled_story_paths)``.
    The compile id is content-addressed over the inputs, so an existing
    manifest is reused unless ``force_rebuild`` is set.

    NOTE(review): ``pass_index`` and ``prior_validation_issues`` are accepted
    for interface compatibility but not consulted here — confirm callers.
    """
    paths = get_idea_paths(repo_root, idea_id=idea_id)
    grounded_ui_patch = _load_grounded_story_reference_patch(repo_root, idea_id=idea_id, story_paths=story_paths)
    compile_id = _stable_id(
        "dfs_",
        {
            "idea_id": idea_id,
            "story_set_id": story_set_id,
            "planes": planes,
            "story_paths": [str(p.relative_to(repo_root)) for p in story_paths],
            "grounded_ui_patch": _grounded_ui_patch_cache_key(grounded_ui_patch),
        },
    )
    root = paths.idea_dir / "devflow_story_sets" / compile_id
    manifest_path = root / "manifest.json"
    # Cache hit: reuse the previously compiled stories as-is.
    if manifest_path.exists() and not force_rebuild:
        manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
        compiled_paths = [repo_root / rel_path for rel_path in manifest.get("story_paths", [])]
        return compile_id, root, compiled_paths

    root.mkdir(parents=True, exist_ok=True)
    for stale_path in root.glob("story_*.json"):
        stale_path.unlink()
    compiled_paths: list[Path] = []
    adjudication_map = {
        str(item.get("story_path")): item
        for item in ((story_plane_adjudication or {}).get("stories") or [])
        if isinstance(item, dict) and str(item.get("story_path") or "").strip()
    }
    # The actor registry is loop-invariant: load it once instead of re-reading
    # it for every story (the original called load_actor_registry per story).
    actor_registry = load_actor_registry(repo_root)
    for index, source_path in enumerate(sorted(story_paths), start=1):
        source_text = source_path.read_text(encoding="utf-8")
        title, actor_label, acceptance, statement = _story_statement_from_traditional(source_text)
        rel_source = str(source_path.relative_to(repo_root))
        story_id = f"STORY:idea:{idea_id}:compiled:{index:03d}"
        story_planes = [str(item).strip() for item in ((adjudication_map.get(str(source_path)) or {}).get("required_planes") or planes) if str(item).strip()]
        story_uuid = _deterministic_uuid4(f"{idea_id}:{story_set_id}:{rel_source}:{','.join(story_planes)}")
        payload = {
            "story_uuid": story_uuid,
            "story_id": story_id,
            "title": title,
            "contract_formation_mode": MANDATORY_CONTRACT_FORMATION_MODE_LINE,
            "required_planes": story_planes,
            "plane_oracles": [
                {
                    "plane": plane,
                    "oracle": f"{title}: the user-observable outcome remains satisfied for the {plane} plane.",
                    "anchor": f"artifact:{rel_source}",
                }
                for plane in story_planes
            ],
            "evidence_anchors": [f"artifact:{rel_source}"],
            "draft_story_id": f"compiled_{story_set_id}_{index:03d}",
            # NOTE(review): raises IndexError when story_planes is empty —
            # confirm upstream always supplies at least one plane.
            "plane": story_planes[0],
            "source_analysis_id": story_set_id,
        }
        primary_actor = resolve_actor_entry(actor=actor_label, actor_registry=actor_registry) if actor_label else None
        if primary_actor is None and actor_label:
            raise RuntimeError(f"Traditional story actor {actor_label!r} is not present in canonical actor registry during handoff.")
        if primary_actor is not None:
            payload["primary_actor"] = {
                "id": str(primary_actor.get("id") or "").strip(),
                "label": str(primary_actor.get("label") or "").strip(),
                "kind": str(primary_actor.get("kind") or "human").strip() or "human",
                "inherits_from": str(primary_actor.get("inherits_from") or "").strip() or None,
            }
        if statement:
            payload["user_value_statement"] = statement
        if acceptance:
            payload["acceptance_criteria"] = acceptance
        payload = _merge_grounded_ui_anchors(payload=payload, ui_patch=grounded_ui_patch)
        out_path = root / f"story_{index:03d}.json"
        out_path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8")
        compiled_paths.append(out_path)

    manifest = {
        "devflow_story_set_id": compile_id,
        "idea_id": idea_id,
        "kind": "devflow_story_set",
        "planes": planes,
        "source_story_set_id": story_set_id,
        "source_story_paths": [str(path.relative_to(repo_root)) for path in sorted(story_paths)],
        "story_paths": [str(path.relative_to(repo_root)) for path in compiled_paths],
    }
    manifest_path.write_text(json.dumps(manifest, indent=2, sort_keys=True), encoding="utf-8")
    return compile_id, root, compiled_paths
654
+
655
+
656
def _build_devflow_story_validation_workspace(*, root: Path, compiled_paths: list[Path], pass_index: int) -> Path:
    """Render compiled stories as markdown into a per-pass validation tree.

    Returns the validation root that mimics the repo layout the validator
    expects.
    """
    validation_root = root / "_validation" / f"pass_{pass_index:03d}"
    story_dir = validation_root / "ai_docs" / "context" / "v2" / "project_docs" / "user_stories" / "generated"
    story_dir.mkdir(parents=True, exist_ok=True)

    for position, story_file in enumerate(sorted(compiled_paths), start=1):
        story_payload = json.loads(story_file.read_text(encoding="utf-8"))
        rendered = render_draft_story_markdown(story_payload)
        (story_dir / f"story_{position:03d}.md").write_text(rendered, encoding="utf-8")

    return validation_root
668
+
669
+
670
+ def _run_devflow_story_validator(*, engine_root: Path, validation_repo_root: Path, report_path: Path) -> subprocess.CompletedProcess[str]:
671
+ return subprocess.run(
672
+ [
673
+ sys.executable,
674
+ "-m",
675
+ "devflow_engine.story.validate_stories",
676
+ "--repo-root",
677
+ str(validation_repo_root),
678
+ "--json",
679
+ str(report_path),
680
+ ],
681
+ cwd=str(validation_repo_root),
682
+ text=True,
683
+ capture_output=True,
684
+ check=False,
685
+ )
686
+
687
+
688
def _resolve_existing_sufficient_idea(repo_root: Path, *, idea_id: str) -> tuple[dict[str, Any], dict[str, Any]] | None:
    """Return (idea_payload, sufficient_idea) when a valid one is on disk.

    None when idea.json has no sufficient_idea dict or validation reports
    missing fields.
    """
    payload = _read_idea_payload(repo_root, idea_id=idea_id)
    candidate = payload.get("sufficient_idea")
    if not isinstance(candidate, dict):
        return None
    if validate_sufficient_idea(candidate):
        # A non-empty result means required fields are missing.
        return None
    return payload, candidate
697
+
698
+
699
class SufficiencyGateNode(Node):
    """Gate node: verify the idea is sufficiently specified before story work.

    Reuses a previously validated ``sufficient_idea`` from idea.json when one
    exists; otherwise loads the raw idea text, runs the sufficiency
    evaluation, and persists the result. Stops the workflow (exit_code 2)
    when the input cannot be loaded or the idea is judged insufficient.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="sufficiency_gate", summary="Reviewing idea readiness")
        node_exec_id = store.create_node_attempt(run_id=run_id, node_id="sufficiency_gate", node_name="SufficiencyGate", attempt=1)

        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "sufficiency.json"
        # Fast path: a valid sufficient_idea is already stored in idea.json.
        existing = _resolve_existing_sufficient_idea(repo_root, idea_id=event.idea_id)
        if existing is not None:
            payload, sufficient_idea = existing
            result_payload = {
                "input": {"mode": "existing_idea_json", "source": str((get_idea_paths(repo_root, idea_id=event.idea_id).idea_dir / "idea.json").relative_to(repo_root))},
                "sufficient_idea": sufficient_idea,
                "story_generation_pass": True,
                "reused_existing": True,
                "identified_gaps": [],
                "blockers": [],
                "discussion_points": [],
                "sufficiency_quotient": 100,
            }
            _write_json(stage_path, result_payload)
            store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="idea_sufficiency", uri=str(stage_path), metadata=result_payload)
            store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output={"artifact_path": str(stage_path), "reused_existing": True})
            task_context.metadata["sufficient_idea"] = sufficient_idea
            task_context.metadata["sufficiency_payload"] = result_payload
            return task_context

        try:
            raw_text, input_meta = load_idea_source(
                text=event.raw_text,
                source_path=Path(event.source_path) if event.source_path else None,
            )
        except ValueError as exc:
            # Input could not be loaded at all: record the failure and halt.
            payload = {"story_generation_pass": False, "error": str(exc), "blockers": ["input_source"]}
            _write_json(stage_path, payload)
            store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="idea_sufficiency", uri=str(stage_path), metadata=payload)
            store.mark_node_finished(node_exec_id=node_exec_id, status="failed", output=payload, error={"message": str(exc)})
            task_context.metadata["message"] = json.dumps(payload, indent=2, sort_keys=True) + "\npipeline blocked: insufficient ideation\n"
            task_context.metadata["exit_code"] = 2
            task_context.stop_workflow()
            return task_context

        result = evaluate_idea_sufficiency(raw_text, input_meta=input_meta)
        payload = dict(result.payload)
        payload["sufficient_idea"] = extract_sufficient_idea(raw_text)
        _write_json(stage_path, payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="idea_sufficiency", uri=str(stage_path), metadata=payload)

        if not result.passed:
            # Idea was evaluated but judged insufficient: block the pipeline.
            store.mark_node_finished(node_exec_id=node_exec_id, status="failed", output=payload, error={"message": "insufficient ideation"})
            task_context.metadata["message"] = render_sufficiency_json(result) + "pipeline blocked: insufficient ideation\n"
            task_context.metadata["exit_code"] = 2
            task_context.stop_workflow()
            return task_context

        # Persist the accepted idea back into idea.json for later reuse by
        # the fast path above.
        idea_dir = get_idea_paths(repo_root, idea_id=event.idea_id).idea_dir
        idea_dir.mkdir(parents=True, exist_ok=True)
        idea_json = idea_dir / "idea.json"
        current = _read_idea_payload(repo_root, idea_id=event.idea_id)
        current["idea_id"] = event.idea_id
        current.setdefault("title", str(payload["sufficient_idea"].get("summary") or event.idea_id))
        current["sufficient_idea"] = payload["sufficient_idea"]
        current["ideation_output"] = payload
        idea_json.write_text(json.dumps(current, indent=2, sort_keys=True), encoding="utf-8")

        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output={"artifact_path": str(stage_path), "reused_existing": False})
        task_context.metadata["sufficient_idea"] = payload["sufficient_idea"]
        task_context.metadata["sufficiency_payload"] = payload
        return task_context
770
+
771
+
772
class ActorRegistryNormalizationNode(Node):
    """Normalize the idea's actors against the repo's canonical actor registry.

    Merges newly resolved actors with the existing registry, rewrites the
    registry file, mirrors the canonical actor list into idea.json and the
    in-flight ``sufficient_idea``, and records the normalization artifact.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="actor_registry_normalization", summary="Normalizing canonical actors")
        node_exec_id = store.create_node_attempt(run_id=run_id, node_id="actor_registry_normalization", node_name="ActorRegistryNormalization", attempt=1)
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "actor_registry_normalization.json"

        sufficient_idea = dict(task_context.metadata.get("sufficient_idea") or {})
        existing_registry = load_actor_registry(repo_root)
        normalized = normalize_idea_actors(
            repo_root=repo_root,
            idea_id=event.idea_id,
            sufficient_idea=sufficient_idea,
            existing_registry=existing_registry,
        )
        # Prefer the normalized output, falling back to prior registry values
        # so a partial normalization result never drops existing data.
        merged_registry = {
            "actors": list(normalized.get("resolved_actors") or normalized.get("actors") or []),
            "canonical_actors": list(normalized.get("canonical_actors") or []),
            "notes": normalized.get("notes") or existing_registry.get("notes") or [],
            "path": normalized.get("path") or str(existing_registry.get("path") or ""),
        }
        registry_path = write_actor_registry(
            repo_root,
            actors=merged_registry["actors"],
            notes=merged_registry["notes"],
        )

        # Mirror canonical actors into both the in-memory idea and idea.json.
        sufficient_idea["target_users"] = list(merged_registry["canonical_actors"])
        idea_payload = _read_idea_payload(repo_root, idea_id=event.idea_id)
        if isinstance(idea_payload.get("sufficient_idea"), dict):
            idea_payload["sufficient_idea"] = {**dict(idea_payload["sufficient_idea"]), "target_users": list(merged_registry["canonical_actors"])}
        idea_payload["actor_registry"] = {
            "path": str(registry_path.relative_to(repo_root)),
            "canonical_actors": list(merged_registry["canonical_actors"]),
            "actors": list(merged_registry["actors"]),
            "notes": list(merged_registry["notes"]),
        }
        _write_json(get_idea_paths(repo_root, idea_id=event.idea_id).idea_dir / "idea.json", idea_payload)

        payload = {
            "actor_registry_path": str(registry_path.relative_to(repo_root)),
            "existing_actor_count": len(existing_registry.get("actors") or []),
            "canonical_actors": list(merged_registry["canonical_actors"]),
            "resolved_actors": list(merged_registry["actors"]),
            "additions": list(normalized.get("additions") or []),
            "repairs": list(normalized.get("repairs") or []),
            "notes": list(merged_registry["notes"]),
        }
        _write_json(stage_path, payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="idea_actor_registry", uri=str(stage_path), metadata=payload)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=payload)
        task_context.metadata["sufficient_idea"] = sufficient_idea
        task_context.metadata["actor_registry"] = merged_registry
        return task_context
828
+
829
+
830
class PlaneRequirementGateNode(Node):
    """Resolve the candidate plane set for story generation and record it."""

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="plane_requirement_gate", summary="Resolving candidate story planes")
        node_exec_id = store.create_node_attempt(run_id=run_id, node_id="plane_requirement_gate", node_name="PlaneRequirementGate", attempt=1)
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "plane_requirement.json"

        candidate_planes = _resolve_story_plane_candidates(event.planes)
        # Record whether the planes came from the caller or from the canon.
        caller_supplied = bool(_parse_list_option(event.planes))
        payload = {
            "candidate_planes": candidate_planes,
            "source": "upstream_candidate_planes" if caller_supplied else "canonical_story_plane_universe",
        }
        _write_json(stage_path, payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="idea_plane_requirement", uri=str(stage_path), metadata=payload)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=payload)
        task_context.metadata["candidate_planes"] = candidate_planes
        return task_context
848
+
849
+
850
class StoryCoverageRequirementsNode(Node):
    """Generate and persist story coverage requirements for the idea.

    Delegates to ``generate_story_coverage_requirements`` and stores the
    result both as a pipeline artifact and in task metadata for downstream
    nodes.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="story_coverage_requirements", summary="Generating story coverage requirements")
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="story_coverage_requirements",
            node_name="StoryCoverageRequirements",
            attempt=1,
        )
        sufficient_idea = dict(task_context.metadata.get("sufficient_idea") or {})
        payload = _read_idea_payload(repo_root, idea_id=event.idea_id)
        requirements = generate_story_coverage_requirements(
            repo_root=repo_root,
            idea_id=event.idea_id,
            payload=payload,
            sufficient_idea=sufficient_idea,
        )
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "coverage_requirements.json"
        _write_json(stage_path, requirements)
        stage_payload = {"coverage_requirements": requirements, "artifact_path": str(stage_path)}
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="traditional_story_coverage_requirements", uri=str(stage_path), metadata=requirements)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        task_context.metadata["coverage_requirements"] = requirements
        # Propagate an actor registry produced as part of requirements, if any.
        if isinstance(requirements.get("actor_registry"), dict):
            task_context.metadata["actor_registry"] = dict(requirements.get("actor_registry") or {})
        return task_context
879
+
880
+
881
class TraditionalStoryGenerationNode(Node):
    """Generate the traditional user-story set for the idea.

    On success, records the compiled story set plus its sufficiency report and
    publishes ``story_set_id`` / ``stories_dir`` / ``story_paths`` /
    ``traditional_story_sufficiency`` into the task metadata.

    When the generator raises ``TraditionalStoryInsufficiencyError`` the node
    is still marked "succeeded" — with an EMPTY ``story_paths`` list and the
    failing sufficiency report attached — so the downstream coverage gate node
    is the one that fails the run. Generation itself completing is treated as
    node success; coverage judgment is a separate gate.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="traditional_story_generation", summary="Generating user stories")
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="traditional_story_generation",
            node_name="TraditionalStoryGeneration",
            attempt=1,
        )

        try:
            story_set = generate_traditional_user_story_set(
                repo_root=repo_root,
                idea_id=event.idea_id,
                max_stories=event.max_stories,
                # Upstream nodes may not have populated these; default to empty dicts.
                coverage_requirements=dict(task_context.metadata.get("coverage_requirements") or {}),
                actor_registry=dict(task_context.metadata.get("actor_registry") or {}),
            )
        except TraditionalStoryInsufficiencyError as exc:
            # Insufficient coverage: persist the report and hand the pass/fail
            # decision to the coverage gate (note status stays "succeeded").
            stage_payload = {
                "story_set_id": exc.story_set_id,
                "stories_dir": str(exc.root),
                "report_path": str(exc.report_path),
                "sufficiency": exc.report,
            }
            stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "traditional_story_generation.json"
            _write_json(stage_path, stage_payload)
            store.add_artifact(
                run_id=run_id,
                node_exec_id=node_exec_id,
                kind="traditional_user_story_report",
                uri=str(stage_path),
                metadata=stage_payload,
            )
            store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
            task_context.metadata["story_set_id"] = exc.story_set_id
            task_context.metadata["stories_dir"] = str(exc.root)
            # Empty story_paths signals downstream nodes that nothing usable was produced.
            task_context.metadata["story_paths"] = []
            task_context.metadata["traditional_story_sufficiency"] = exc.report
            return task_context
        # Success path: the generator wrote a manifest listing all story files.
        manifest_path = story_set.root / "manifest.json"
        manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
        stage_payload = {
            "story_set_id": story_set.story_set_id,
            "stories_dir": str(story_set.root),
            "story_paths": manifest.get("story_paths", []),
            "sufficiency": story_set.sufficiency_report,
        }
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "traditional_story_generation.json"
        _write_json(stage_path, stage_payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="traditional_user_stories", uri=str(stage_path), metadata=stage_payload)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        task_context.metadata["story_set_id"] = story_set.story_set_id
        task_context.metadata["stories_dir"] = str(story_set.root)
        task_context.metadata["story_paths"] = [str(path) for path in story_set.story_paths]
        task_context.metadata["traditional_story_sufficiency"] = story_set.sufficiency_report
        return task_context
941
+
942
+
943
class TraditionalStoryCoverageGateNode(AgentNode):
    """Gate node: fail the run if the generated stories do not cover the idea.

    Reads the sufficiency report left by TraditionalStoryGenerationNode in
    ``task_context.metadata["traditional_story_sufficiency"]``; the report's
    ``passed`` flag decides success vs. workflow stop with exit code 2.
    """

    def get_agent_config(self) -> AgentConfig:
        # Instructions for the agentic evaluation backing this gate.
        return AgentConfig(
            instructions=(
                "Evaluate whether the generated traditional user stories sufficiently represent the validated idea, "
                "using structured semantic judgment."
            )
        )

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="traditional_story_coverage_gate", summary="Reviewing story coverage")
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="traditional_story_coverage_gate",
            node_name="TraditionalStoryCoverageGate",
            attempt=1,
        )

        # The upstream generation node owns the report; missing report -> {} -> gate fails.
        report = dict(task_context.metadata.get("traditional_story_sufficiency") or {})
        stage_payload = {
            "story_set_id": str(task_context.metadata.get("story_set_id") or ""),
            "stories_dir": str(task_context.metadata.get("stories_dir") or ""),
            "sufficiency": report,
            "evaluation_mode": report.get("evaluation_mode") or "agentic",
        }
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "traditional_story_coverage_gate.json"
        # Persist + register the gate artifact before deciding pass/fail so the
        # report survives even when the workflow stops below.
        _write_json(stage_path, stage_payload)
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="traditional_story_coverage_gate",
            uri=str(stage_path),
            metadata=stage_payload,
        )

        if not report.get("passed"):
            store.mark_node_finished(
                node_exec_id=node_exec_id,
                status="failed",
                output=stage_payload,
                error={"message": "traditional story coverage gate failed after refinement"},
            )
            # Human-readable failure summary consumed by the DAG runner.
            task_context.metadata["message"] = (
                f"dag_id: {DAG_ID}\n"
                f"pipeline_dir: {_pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)}\n"
                f"traditional story coverage insufficient after {report.get('pass_count')} pass(es)\n"
                f"{json.dumps({'final_findings': report.get('final_findings'), 'report_path': str(stage_path)}, indent=2, sort_keys=True)}\n"
            )
            task_context.metadata["exit_code"] = 2
            task_context.stop_workflow()
            return task_context

        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        return task_context
1000
+
1001
+
1002
class TraditionalStoryDecompositionNode(AgentNode):
    """Gate node: fail the run if stories are not cleanly decomposed.

    Reads the ``decomposition`` sub-report from
    ``task_context.metadata["traditional_story_sufficiency"]``; a falsy
    ``passed`` flag stops the workflow with exit code 2.
    """

    def get_agent_config(self) -> AgentConfig:
        # Instructions for the agentic evaluation backing this gate.
        return AgentConfig(
            instructions=(
                "Evaluate whether the covered traditional user stories are cleanly decomposed into atomic, non-overlapping stories."
            )
        )

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="traditional_story_decomposition", summary="Reviewing story decomposition")
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="traditional_story_decomposition",
            node_name="TraditionalStoryDecomposition",
            attempt=1,
        )

        # Nested lookup: sufficiency report -> "decomposition" section; either may be absent.
        report = dict((task_context.metadata.get("traditional_story_sufficiency") or {}).get("decomposition") or {})
        stage_payload = {
            "story_set_id": str(task_context.metadata.get("story_set_id") or ""),
            "stories_dir": str(task_context.metadata.get("stories_dir") or ""),
            "decomposition": report,
            "evaluation_mode": report.get("evaluation_mode") or "agentic",
        }
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "traditional_story_decomposition.json"
        # Persist the stage artifact before the pass/fail decision so it survives a stop.
        _write_json(stage_path, stage_payload)
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="traditional_story_decomposition",
            uri=str(stage_path),
            metadata=stage_payload,
        )
        if not report.get("passed"):
            store.mark_node_finished(node_exec_id=node_exec_id, status="failed", output=stage_payload, error={"message": "traditional story decomposition failed"})
            # Human-readable failure summary consumed by the DAG runner.
            task_context.metadata["message"] = (
                f"dag_id: {DAG_ID}\n"
                f"pipeline_dir: {_pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)}\n"
                f"traditional story decomposition failed\n"
                f"{json.dumps({'findings': report.get('findings'), 'report_path': str(stage_path)}, indent=2, sort_keys=True)}\n"
            )
            task_context.metadata["exit_code"] = 2
            task_context.stop_workflow()
            return task_context
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        return task_context
1051
+
1052
+
1053
class StoryPlaneAdjudicationNode(Node):
    """Assign each generated story to its candidate planes.

    For every story file in ``task_context.metadata["story_paths"]`` this node
    extracts the story statement, runs plane adjudication against the
    candidate planes, and persists the combined result as
    ``story_plane_adjudication.json``. An empty story list is a hard failure:
    a handoff-failure file with a resume cursor is written and the workflow
    stops with exit code 2.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="story_plane_adjudication", summary="Confirming story planes")
        node_exec_id = store.create_node_attempt(run_id=run_id, node_id="story_plane_adjudication", node_name="StoryPlaneAdjudication", attempt=1)

        story_paths = [Path(path_str) for path_str in task_context.metadata.get("story_paths", [])]
        if not story_paths:
            # No stories to adjudicate (e.g. generation reported insufficiency):
            # record a handoff failure so the pipeline can later be resumed.
            payload = {"error": "No traditional stories available for plane adjudication."}
            pipeline_dir = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)
            failure_payload = _handoff_failure_payload(
                stage="story_plane_adjudication",
                idea_id=event.idea_id,
                pipeline_dir=pipeline_dir,
                # "" is normalized to None so the payload never carries an empty id.
                story_set_id=str(task_context.metadata.get("story_set_id") or "") or None,
                details=payload,
                resume_cursor=_resume_cursor_for_story_pipeline(pipeline_dir=pipeline_dir, story_set_id=str(task_context.metadata.get("story_set_id") or "") or None),
            )
            _write_handoff_failure(pipeline_dir=pipeline_dir, payload=failure_payload)
            stage_path = pipeline_dir / "story_plane_adjudication.json"
            _write_json(stage_path, payload)
            store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="story_plane_adjudication", uri=str(stage_path), metadata=payload)
            store.mark_node_finished(node_exec_id=node_exec_id, status="failed", output=payload, error={"message": payload["error"]})
            task_context.metadata["message"] = json.dumps(payload, indent=2, sort_keys=True) + "\n"
            task_context.metadata["exit_code"] = 2
            task_context.stop_workflow()
            return task_context

        # Prefer planes resolved by PlaneRequirementGateNode; fall back to re-resolving.
        candidate_planes = list(task_context.metadata.get("candidate_planes") or _resolve_story_plane_candidates(event.planes))

        adjudications = []
        for source_path in story_paths:
            text = source_path.read_text(encoding="utf-8")
            title, _actor, acceptance, statement = _story_statement_from_traditional(text)
            story_payload = {
                "title": title,
                "acceptance_criteria": acceptance,
                "statement": statement,
                "source_path": str(source_path),
            }
            adjudication = _adjudicate_story_planes(repo_root=repo_root, idea_id=event.idea_id, story=story_payload, candidate_planes=candidate_planes)
            adjudications.append({
                "story_path": str(source_path),
                **adjudication,
            })
        stage_payload = {"candidate_planes": candidate_planes, "stories": adjudications}
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "story_plane_adjudication.json"
        _write_json(stage_path, stage_payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="story_plane_adjudication", uri=str(stage_path), metadata=stage_payload)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        task_context.metadata["story_plane_adjudication"] = stage_payload
        return task_context
1107
+
1108
+
1109
class DevflowStoryCompilationNode(Node):
    """Compile traditional stories into DevFlow story JSON and validate them.

    Runs up to three compile→validate passes, feeding the previous pass's
    validation issues back into the compiler. On final validation failure a
    handoff-failure file with a resume cursor is written and the workflow
    stops with exit code 1. On success each compiled story is registered as
    an artifact and enqueued as a story task, and the run's summary message
    is populated.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(event.repo_root)
        store, run_id = _store_run()
        _dfs_node_running(repo_root=repo_root, idea_id=event.idea_id, run_id=run_id, node_id="devflow_story_compilation", summary="Compiling DevFlow stories")
        # assumes the engine checkout root is three directories above this module — TODO confirm layout
        engine_root = Path(__file__).resolve().parents[3]
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="devflow_story_compilation",
            node_name="DevflowStoryCompilation",
            attempt=1,
        )

        story_paths = [Path(path_str) for path_str in task_context.metadata.get("story_paths", [])]
        story_set_id = str(task_context.metadata.get("story_set_id") or "")
        compile_id = ""
        root: Path | None = None
        compiled_paths: list[Path] = []
        validation_history: list[dict[str, Any]] = []
        validation_issues: list[dict[str, str]] = []
        # Pessimistic default in case the loop below never runs to completion.
        validation_report: dict[str, Any] = {"ok": False, "issue_count": 0, "issues": []}

        # Up to 3 compile→validate passes; later passes force a rebuild and
        # feed the prior pass's issues back into the compiler.
        for pass_index in range(1, 4):
            compile_id, root, compiled_paths = _compile_devflow_story_set(
                repo_root=repo_root,
                idea_id=event.idea_id,
                story_set_id=story_set_id,
                story_paths=story_paths,
                planes=event.planes,
                story_plane_adjudication=task_context.metadata.get("story_plane_adjudication"),
                pass_index=pass_index,
                prior_validation_issues=validation_issues,
                force_rebuild=pass_index > 1,
            )
            validation_workspace = _build_devflow_story_validation_workspace(
                root=root,
                compiled_paths=compiled_paths,
                pass_index=pass_index,
            )
            pass_report_path = root / f"validation_pass_{pass_index:03d}.json"
            validator = _run_devflow_story_validator(
                engine_root=engine_root,
                validation_repo_root=validation_workspace,
                report_path=pass_report_path,
            )
            validation_report = _load_json_report(pass_report_path)
            # Backfill keys the validator report may omit; the returncode is
            # the fallback for "ok" when the report doesn't say.
            validation_report.setdefault("ok", validator.returncode == 0)
            validation_report.setdefault("issue_count", len(validation_report.get("issues") or []))
            validation_report.setdefault("issues", [])
            validation_issues = list(validation_report.get("issues") or [])
            validation_history.append(
                {
                    "pass_index": pass_index,
                    "ok": bool(validation_report.get("ok")),
                    "issue_count": int(validation_report.get("issue_count") or 0),
                    "issues": validation_issues,
                    "validator_returncode": validator.returncode,
                    "validator_stdout": validator.stdout,
                    "validator_stderr": validator.stderr,
                    "validator_report_path": str(pass_report_path),
                }
            )
            if validation_report.get("ok"):
                break

        # The loop always runs at least once, so root is set by now.
        assert root is not None
        manifest = json.loads((root / "manifest.json").read_text(encoding="utf-8"))
        consolidated_report = {
            "devflow_story_set_id": compile_id,
            "idea_id": event.idea_id,
            "ok": bool(validation_report.get("ok")),
            "pass_count": len(validation_history),
            "history": validation_history,
            "final_issues": validation_issues,
            "final_issue_count": len(validation_issues),
        }
        consolidated_report_path = root / "validation_report.json"
        _write_json(consolidated_report_path, consolidated_report)
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="devflow_story_validation_report",
            uri=str(consolidated_report_path),
            metadata=consolidated_report,
            content_type="application/json",
            byte_size=len(json.dumps(consolidated_report, sort_keys=True)),
        )

        if not validation_report.get("ok"):
            # Validation never converged: persist the stage, write a handoff
            # failure with a resume cursor, and stop the workflow.
            stage_payload = {
                "devflow_story_set_id": compile_id,
                "devflow_stories_dir": str(root),
                "story_paths": manifest.get("story_paths", []),
                "source_story_set_id": story_set_id,
                "planes": event.planes,
                "validation": {
                    "ok": False,
                    "issue_count": len(validation_issues),
                    "issues": validation_issues,
                    "pass_count": len(validation_history),
                    "history": validation_history,
                    "report_path": str(consolidated_report_path),
                },
            }
            pipeline_dir = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)
            stage_path = pipeline_dir / "devflow_story_compilation.json"
            _write_json(stage_path, stage_payload)
            failure_payload = _handoff_failure_payload(
                stage="devflow_story_compilation",
                idea_id=event.idea_id,
                pipeline_dir=pipeline_dir,
                # "" ids are normalized to None.
                story_set_id=story_set_id or None,
                devflow_story_set_id=compile_id or None,
                details=stage_payload.get("validation") or {},
                resume_cursor=_resume_cursor_for_story_pipeline(pipeline_dir=pipeline_dir, story_set_id=story_set_id or None),
            )
            _write_handoff_failure(pipeline_dir=pipeline_dir, payload=failure_payload)
            store.mark_node_finished(
                node_exec_id=node_exec_id,
                status="failed",
                output=stage_payload,
                error={"message": "story contract validation failed"},
            )
            task_context.metadata["message"] = (
                f"dag_id: {DAG_ID}\n"
                f"pipeline_dir: {_pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)}\n"
                f"story contract validation failed after {len(validation_history)} pass(es)\n"
                f"{json.dumps(stage_payload['validation'], indent=2, sort_keys=True)}\n"
            )
            task_context.metadata["exit_code"] = 1
            task_context.stop_workflow()
            return task_context

        # Success: register each compiled story and enqueue it as a story task.
        project_entry = find_project_for_repo_root(repo_root)
        project_id = None if project_entry is None else str(project_entry.get("project_id") or "") or None
        story_artifact_ids: list[str] = []
        for compiled_path in compiled_paths:
            story_payload = json.loads(compiled_path.read_text(encoding="utf-8"))
            artifact_id = store.add_artifact(
                run_id=run_id,
                node_exec_id=node_exec_id,
                kind="generated_devflow_story_json",
                uri=str(compiled_path),
                metadata=story_payload,
                content_type="application/json",
                byte_size=len(json.dumps(story_payload, sort_keys=True)),
            )
            story_artifact_ids.append(artifact_id)
            store.enqueue_story_task(
                project_id=project_id,
                enqueue_run_id=run_id,
                story_artifact_id=artifact_id,
                story_id=str(story_payload.get("story_id") or ""),
                title=str(story_payload.get("title") or ""),
            )

        # Write the artifact ids back into the manifest so the set is traceable.
        manifest["story_artifact_ids"] = story_artifact_ids
        (root / "manifest.json").write_text(json.dumps(manifest, indent=2, sort_keys=True), encoding="utf-8")
        stage_payload = {
            "devflow_story_set_id": compile_id,
            "devflow_stories_dir": str(root),
            "story_paths": manifest.get("story_paths", []),
            "story_artifact_ids": story_artifact_ids,
            "source_story_set_id": story_set_id,
            "planes": event.planes,
            "project_id": project_id,
            "validation": {
                "ok": True,
                "issue_count": 0,
                "issues": [],
                "pass_count": len(validation_history),
                "history": validation_history,
                "report_path": str(consolidated_report_path),
            },
        }
        stage_path = _pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key) / "devflow_story_compilation.json"
        _write_json(stage_path, stage_payload)
        store.add_artifact(run_id=run_id, node_exec_id=node_exec_id, kind="devflow_story_set", uri=str(stage_path), metadata=stage_payload)
        store.mark_node_finished(node_exec_id=node_exec_id, status="succeeded", output=stage_payload)
        task_context.metadata["devflow_story_set_id"] = compile_id
        task_context.metadata["pipeline_dir"] = str(_pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key))
        pass_count = int((task_context.metadata.get("traditional_story_sufficiency") or {}).get("pass_count") or 1)
        # Final human-readable summary returned by the DAG runner.
        task_context.metadata["message"] = (
            f"dag_id: {DAG_ID}\n"
            f"pipeline_dir: {_pipeline_root(repo_root, idea_id=event.idea_id, pipeline_key=event.pipeline_key)}\n"
            f"traditional_story_sufficiency_pass: {pass_count}/3\n"
            f"story_set_id: {story_set_id}\n"
            f"stories_dir: {get_idea_paths(repo_root, idea_id=event.idea_id).idea_dir / 'traditional_user_stories' / story_set_id}\n"
            f"devflow_story_set_id: {compile_id}\n"
            f"devflow_stories_dir: {root}\n"
        )
        return task_context
1302
+
1303
+
1304
class IdeaToDevflowStoriesWorkflow(Workflow):
    """Linear idea→DevFlow-stories DAG.

    Node order: sufficiency gate → actor-registry normalization → plane
    requirement gate → coverage requirements → story generation → coverage
    gate → decomposition gate → plane adjudication → DevFlow compilation.
    Each node has exactly one successor, so the "DAG" is a straight chain.
    """

    workflow_schema = WorkflowSchema(
        description="Idea to DevFlow stories DAG (GenAI workflow framework)",
        event_schema=IdeaStoryDagEvent,
        start=SufficiencyGateNode,
        nodes=[
            NodeConfig(node=SufficiencyGateNode, connections=[ActorRegistryNormalizationNode]),
            NodeConfig(node=ActorRegistryNormalizationNode, connections=[PlaneRequirementGateNode]),
            NodeConfig(node=PlaneRequirementGateNode, connections=[StoryCoverageRequirementsNode]),
            NodeConfig(node=StoryCoverageRequirementsNode, connections=[TraditionalStoryGenerationNode]),
            NodeConfig(node=TraditionalStoryGenerationNode, connections=[TraditionalStoryCoverageGateNode]),
            NodeConfig(node=TraditionalStoryCoverageGateNode, connections=[TraditionalStoryDecompositionNode]),
            NodeConfig(node=TraditionalStoryDecompositionNode, connections=[StoryPlaneAdjudicationNode]),
            NodeConfig(node=StoryPlaneAdjudicationNode, connections=[DevflowStoryCompilationNode]),
            NodeConfig(node=DevflowStoryCompilationNode, connections=[]),
        ],
    )
1321
+
1322
+
1323
+ def _handoff_failure_payload(*, stage: str, idea_id: str, pipeline_dir: Path, story_set_id: str | None = None, devflow_story_set_id: str | None = None, details: dict[str, Any] | None = None, resume_cursor: dict[str, Any] | None = None) -> dict[str, Any]:
1324
+ return {
1325
+ "idea_id": idea_id,
1326
+ "failed_stage": stage,
1327
+ "story_set_id": story_set_id,
1328
+ "devflow_story_set_id": devflow_story_set_id,
1329
+ "pipeline_dir": str(pipeline_dir),
1330
+ "resume_cursor": resume_cursor,
1331
+ "details": details or {},
1332
+ }
1333
+
1334
+
1335
def _write_handoff_failure(*, pipeline_dir: Path, payload: dict[str, Any]) -> Path:
    """Persist the failure payload as ``handoff_failure.json`` and return its path."""
    failure_path = pipeline_dir / "handoff_failure.json"
    _write_json(failure_path, payload)
    return failure_path
1339
+
1340
+
1341
+ def _resume_cursor_for_story_pipeline(*, pipeline_dir: Path, story_set_id: str | None) -> dict[str, Any]:
1342
+ if not (pipeline_dir / "story_plane_adjudication.json").exists():
1343
+ return {"kind": "idea_to_devflow_stories", "stage": "story_plane_adjudication", "pipeline_dir": str(pipeline_dir), "story_set_id": story_set_id}
1344
+ if not (pipeline_dir / "devflow_story_compilation.json").exists():
1345
+ return {"kind": "idea_to_devflow_stories", "stage": "devflow_story_compilation", "pipeline_dir": str(pipeline_dir), "story_set_id": story_set_id}
1346
+ return {"kind": "idea_to_devflow_stories", "stage": "retry_pipeline", "pipeline_dir": str(pipeline_dir), "story_set_id": story_set_id}
1347
+
1348
+
1349
+ def _load_pipeline_stage(pipeline_dir: Path, stage_name: str) -> dict[str, Any]:
1350
+ path = pipeline_dir / f"{stage_name}.json"
1351
+ if not path.exists():
1352
+ return {}
1353
+ payload = json.loads(path.read_text(encoding="utf-8"))
1354
+ return payload if isinstance(payload, dict) else {}
1355
+
1356
+
1357
def _resume_story_pipeline_context(*, repo_root: Path, event: IdeaStoryDagEvent, pipeline_dir: Path) -> tuple[TaskContext, type[Node]] | None:
    """Reconstruct a TaskContext to resume a previously-failed story pipeline.

    Returns ``(context, start_node)`` when enough persisted stage artifacts
    exist to restart mid-pipeline, or ``None`` to run the workflow from the
    beginning. The resume stage comes from the handoff-failure cursor when
    present, otherwise it is inferred from which stage files exist.
    """
    handoff_failure_path = pipeline_dir / "handoff_failure.json"
    handoff_failure: dict[str, Any] = {}
    stage = ""
    if handoff_failure_path.exists():
        loaded = json.loads(handoff_failure_path.read_text(encoding="utf-8"))
        if isinstance(loaded, dict):
            handoff_failure = loaded
    # Only trust the cursor if it belongs to this pipeline kind.
    resume_cursor = handoff_failure.get("resume_cursor") if isinstance(handoff_failure.get("resume_cursor"), dict) else {}
    if str(resume_cursor.get("kind") or "") == "idea_to_devflow_stories":
        stage = str(resume_cursor.get("stage") or handoff_failure.get("failed_stage") or "").strip()

    generation_payload = _load_pipeline_stage(pipeline_dir, "traditional_story_generation")
    coverage_payload = _load_pipeline_stage(pipeline_dir, "traditional_story_coverage_gate")
    decomposition_payload = _load_pipeline_stage(pipeline_dir, "traditional_story_decomposition")
    plane_payload = _load_pipeline_stage(pipeline_dir, "story_plane_adjudication")
    compilation_payload = _load_pipeline_stage(pipeline_dir, "devflow_story_compilation")
    # NOTE(review): Path joining — when a stored path is absolute, `repo_root / path`
    # yields the absolute path unchanged (pathlib semantics), so both relative and
    # absolute stored paths work here.
    story_paths = [repo_root / Path(path_str) for path_str in (generation_payload.get("story_paths") or []) if str(path_str).strip()]
    # Resuming is impossible without every generated story file on disk.
    if not story_paths or not all(path.exists() for path in story_paths):
        return None

    if not stage:
        # No usable cursor: infer the resume stage from which artifacts exist.
        if plane_payload.get("stories") and not compilation_payload:
            stage = "devflow_story_compilation"
        elif decomposition_payload.get("decomposition") and not plane_payload:
            stage = "story_plane_adjudication"
        elif coverage_payload.get("sufficiency") and not decomposition_payload:
            stage = "traditional_story_decomposition"
        # This branch only fires when the decomposition file exists but lacks
        # its "decomposition" key (otherwise the second branch already matched).
        elif coverage_payload.get("sufficiency") and decomposition_payload and not plane_payload:
            stage = "story_plane_adjudication"
        else:
            return None

    # Validate that the chosen stage's prerequisites are actually present.
    start_node: type[Node]
    if stage == "devflow_story_compilation" and (plane_payload.get("stories") or plane_payload.get("candidate_planes")):
        start_node = DevflowStoryCompilationNode
    elif stage == "story_plane_adjudication" and decomposition_payload.get("decomposition"):
        start_node = StoryPlaneAdjudicationNode
    elif stage == "traditional_story_decomposition" and coverage_payload.get("sufficiency"):
        start_node = TraditionalStoryDecompositionNode
    else:
        return None

    # Rebuild the metadata the resumed nodes expect from the persisted stages.
    ctx = TaskContext(event=event)
    ctx.metadata["story_set_id"] = str(generation_payload.get("story_set_id") or handoff_failure.get("story_set_id") or "") or None
    ctx.metadata["stories_dir"] = str(generation_payload.get("stories_dir") or "")
    ctx.metadata["story_paths"] = [str(path) for path in story_paths]
    ctx.metadata["traditional_story_sufficiency"] = dict(coverage_payload.get("sufficiency") or generation_payload.get("sufficiency") or {})
    if decomposition_payload.get("decomposition"):
        ctx.metadata["traditional_story_decomposition"] = dict(decomposition_payload.get("decomposition") or {})
    if plane_payload.get("stories") or plane_payload.get("candidate_planes"):
        ctx.metadata["story_plane_adjudication"] = dict(plane_payload)
    return ctx, start_node
1410
+
1411
+
1412
def build_pipeline_key(
    *,
    repo_root: Path,
    idea_id: str,
    text: str | None,
    source_path: Path | None,
    max_stories: int,
    planes: list[str],
) -> str:
    """Derive a deterministic pipeline key for this idea + input combination.

    When an explicit source (text or file) is given, the key is fingerprinted
    on the raw source text; otherwise it falls back to the persisted
    sufficient-idea payload, so re-runs with identical inputs reuse the same
    pipeline directory.
    """
    idea_payload = _read_idea_payload(repo_root, idea_id=idea_id)
    has_explicit_source = source_path is not None or bool(text)
    if has_explicit_source:
        raw_text, _ = load_idea_source(text=text, source_path=source_path)
        source_fingerprint: dict[str, Any] = {"raw_text": raw_text}
    else:
        source_fingerprint = {
            "sufficient_idea": idea_payload.get("sufficient_idea"),
            "idea_id": idea_id,
        }
    fingerprint = {
        "idea_id": idea_id,
        "max_stories": max_stories,
        "planes": _parse_list_option(planes),
        "source": source_fingerprint,
    }
    return _stable_id("run_", fingerprint)
1436
+
1437
+
1438
async def run_idea_to_devflow_stories_dag_async(
    *,
    repo_root: Path,
    store: ExecutionStore,
    idea_id: str,
    text: str | None,
    source_path: Path | None,
    max_stories: int,
    planes: list[str],
) -> IdeaStoryDagResult:
    """Run (or resume) the idea→DevFlow-stories DAG and return its result.

    Creates an execution-store run, exposes the store/run id through module
    globals so node code can reach them via ``_store_run()``, executes the
    workflow (resuming mid-pipeline when persisted stage artifacts allow),
    and finally records terminal run/DFS/idea state. On success with a
    compiled story set, the stories are synced to Supabase. Exceptions from
    the workflow are recorded as a failed run and re-raised.
    """
    normalized_planes = _validate_candidate_planes(_parse_list_option(planes))
    pipeline_key = build_pipeline_key(
        repo_root=repo_root,
        idea_id=idea_id,
        text=text,
        source_path=source_path,
        max_stories=max_stories,
        planes=normalized_planes,
    )
    pipeline_dir = _pipeline_root(repo_root, idea_id=idea_id, pipeline_key=pipeline_key)
    pipeline_dir.mkdir(parents=True, exist_ok=True)

    run_id = store.create_run(
        dag_id=DAG_ID,
        dag_version="v1",
        root_correlation_id=f"corr_{pipeline_key}",
        config={
            "idea_id": idea_id,
            "pipeline_key": pipeline_key,
            "max_stories": max_stories,
            "planes": normalized_planes,
        },
    )
    store.mark_run_started(run_id=run_id)

    wf = IdeaToDevflowStoriesWorkflow()
    event_payload = {
        "repo_root": str(repo_root),
        "idea_id": idea_id,
        "raw_text": text,
        "source_path": str(source_path) if source_path else None,
        "max_stories": max_stories,
        "planes": normalized_planes,
        "pipeline_key": pipeline_key,
    }
    # None -> run the workflow from the start; otherwise a (ctx, start_node)
    # pair reconstructed from persisted stage artifacts.
    resume_plan = _resume_story_pipeline_context(
        repo_root=repo_root,
        event=IdeaStoryDagEvent(**event_payload),
        pipeline_dir=pipeline_dir,
    )
    # Publish initial state so DFS shows the pipeline started
    _dfs_running(repo_root=repo_root, idea_id=idea_id, run_id=run_id, summary="Starting story generation pipeline")
    _update_idea_pipeline_status(repo_root=repo_root, idea_id=idea_id, status="generating_stories")

    # Node code fetches these via _store_run(); clear them in finally so no
    # stale handles leak across runs.
    global _CURRENT_STORE, _CURRENT_RUN_ID
    _CURRENT_STORE = store
    _CURRENT_RUN_ID = run_id
    try:
        if resume_plan is None:
            ctx = await wf._run(event_payload)
        else:
            # Manual replay: walk the node chain from the resume point using
            # the workflow's own node table and successor lookup.
            ctx, start_node = resume_plan
            current_node_class: type[Node] | None = start_node
            while current_node_class:
                if ctx.should_stop:
                    break
                current_node = wf.nodes[current_node_class].node
                ctx = await current_node(task_context=ctx).process(ctx)
                current_node_class = await wf._get_next_node_class(current_node_class, ctx)
    except Exception as exc:
        # Record the failure everywhere before propagating.
        store.mark_run_finished(run_id=run_id, status="failed")
        _dfs_terminal(repo_root=repo_root, idea_id=idea_id, run_id=run_id, summary="Story generation failed", current_state="failed", current_status="failed", error_message=str(exc))
        _update_idea_pipeline_status(repo_root=repo_root, idea_id=idea_id, status="generation_failed")
        raise
    finally:
        _CURRENT_STORE = None
        _CURRENT_RUN_ID = None

    # Nodes signal controlled failures via metadata["exit_code"] (0 = success).
    exit_code = int(ctx.metadata.get("exit_code") or 0)
    store.mark_run_finished(run_id=run_id, status="succeeded" if exit_code == 0 else "failed")
    if exit_code == 0 and ctx.metadata.get("devflow_story_set_id"):
        _sync_devflow_stories_to_supabase(
            repo_root=repo_root,
            idea_id=idea_id,
            run_id=run_id,
            devflow_story_set_id=str(ctx.metadata.get("devflow_story_set_id")),
        )

    if exit_code == 0:
        _dfs_terminal(
            repo_root=repo_root,
            idea_id=idea_id,
            run_id=run_id,
            summary="Story generation complete",
            current_state="completed",
            current_status="succeeded",
        )
        _update_idea_pipeline_status(repo_root=repo_root, idea_id=idea_id, status="stories_generated")
    else:
        _dfs_terminal(
            repo_root=repo_root,
            idea_id=idea_id,
            run_id=run_id,
            summary="Story generation failed",
            current_state="failed",
            current_status="failed",
        )
        _update_idea_pipeline_status(repo_root=repo_root, idea_id=idea_id, status="generation_failed")

    # Surface the latest handoff failure (if any) in the result so callers
    # can offer a resume.
    handoff_failure_path = pipeline_dir / "handoff_failure.json"
    handoff_failure = json.loads(handoff_failure_path.read_text(encoding="utf-8")) if handoff_failure_path.exists() else {}
    return IdeaStoryDagResult(
        exit_code=exit_code,
        run_id=run_id,
        pipeline_dir=pipeline_dir,
        message=str(ctx.metadata.get("message") or ""),
        story_set_id=ctx.metadata.get("story_set_id"),
        devflow_story_set_id=ctx.metadata.get("devflow_story_set_id"),
        failed_stage=str(handoff_failure.get("failed_stage") or "") or None,
        resume_cursor=handoff_failure.get("resume_cursor") if isinstance(handoff_failure.get("resume_cursor"), dict) else None,
    )
1559
+
1560
+
1561
def run_idea_to_devflow_stories_dag(
    *,
    repo_root: Path,
    store: ExecutionStore,
    idea_id: str,
    text: str | None,
    source_path: Path | None,
    max_stories: int,
    planes: list[str],
) -> IdeaStoryDagResult:
    """Synchronous wrapper: drive the async idea→DevFlow-stories DAG to completion."""
    # Instantiate the workflow up front and discard it — presumably so schema
    # construction problems surface before the async run starts; verify intent.
    _ = IdeaToDevflowStoriesWorkflow()
    import asyncio

    coro = run_idea_to_devflow_stories_dag_async(
        repo_root=repo_root,
        store=store,
        idea_id=idea_id,
        text=text,
        source_path=source_path,
        max_stories=max_stories,
        planes=planes,
    )
    return asyncio.run(coro)