devflow-engine 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (393)
  1. devflow_engine/__init__.py +3 -0
  2. devflow_engine/agentic_prompts.py +100 -0
  3. devflow_engine/agentic_runtime.py +398 -0
  4. devflow_engine/api_key_flow_harness.py +539 -0
  5. devflow_engine/api_keys.py +357 -0
  6. devflow_engine/bootstrap/__init__.py +2 -0
  7. devflow_engine/bootstrap/provision_from_template.py +84 -0
  8. devflow_engine/cli/__init__.py +0 -0
  9. devflow_engine/cli/app.py +7270 -0
  10. devflow_engine/core/__init__.py +0 -0
  11. devflow_engine/core/config.py +86 -0
  12. devflow_engine/core/logging.py +29 -0
  13. devflow_engine/core/paths.py +45 -0
  14. devflow_engine/core/toml_kv.py +33 -0
  15. devflow_engine/devflow_event_worker.py +1292 -0
  16. devflow_engine/devflow_state.py +201 -0
  17. devflow_engine/devin2/__init__.py +9 -0
  18. devflow_engine/devin2/agent_definition.py +120 -0
  19. devflow_engine/devin2/pi_runner.py +204 -0
  20. devflow_engine/devin_orchestration.py +69 -0
  21. devflow_engine/docs/prompts/anti-patterns.md +42 -0
  22. devflow_engine/docs/prompts/devin-agent-prompt.md +55 -0
  23. devflow_engine/docs/prompts/devin2-agent-prompt.md +81 -0
  24. devflow_engine/docs/prompts/examples/devin-vapi-clone-reference-exchange.json +85 -0
  25. devflow_engine/doctor/__init__.py +2 -0
  26. devflow_engine/doctor/triage.py +140 -0
  27. devflow_engine/error/__init__.py +0 -0
  28. devflow_engine/error/remediation.py +21 -0
  29. devflow_engine/errors/error_solver_dag.py +522 -0
  30. devflow_engine/errors/runtime_observability.py +67 -0
  31. devflow_engine/idea/__init__.py +4 -0
  32. devflow_engine/idea/actors.py +481 -0
  33. devflow_engine/idea/agentic.py +465 -0
  34. devflow_engine/idea/analyze.py +93 -0
  35. devflow_engine/idea/devin_chat_dag.py +1 -0
  36. devflow_engine/idea/diff.py +99 -0
  37. devflow_engine/idea/drafts.py +446 -0
  38. devflow_engine/idea/idea_creation_dag.py +643 -0
  39. devflow_engine/idea/ideation_enrichment.py +355 -0
  40. devflow_engine/idea/ideation_enrichment_worker.py +19 -0
  41. devflow_engine/idea/paths.py +28 -0
  42. devflow_engine/idea/promote.py +53 -0
  43. devflow_engine/idea/redaction.py +27 -0
  44. devflow_engine/idea/repo_tools.py +1277 -0
  45. devflow_engine/idea/response_mode.py +30 -0
  46. devflow_engine/idea/story_pipeline.py +1585 -0
  47. devflow_engine/idea/sufficiency.py +376 -0
  48. devflow_engine/idea/traditional_stories.py +1257 -0
  49. devflow_engine/implementation/__init__.py +0 -0
  50. devflow_engine/implementation/alembic_preflight.py +700 -0
  51. devflow_engine/implementation/dag.py +8450 -0
  52. devflow_engine/implementation/green_gate.py +93 -0
  53. devflow_engine/implementation/prompts.py +108 -0
  54. devflow_engine/implementation/test_runtime.py +623 -0
  55. devflow_engine/integration/__init__.py +19 -0
  56. devflow_engine/integration/agentic.py +66 -0
  57. devflow_engine/integration/dag.py +3539 -0
  58. devflow_engine/integration/prompts.py +114 -0
  59. devflow_engine/integration/supabase_schema.sql +31 -0
  60. devflow_engine/integration/supabase_sync.py +177 -0
  61. devflow_engine/llm/__init__.py +1 -0
  62. devflow_engine/llm/cli_one_shot.py +84 -0
  63. devflow_engine/llm/cli_stream.py +371 -0
  64. devflow_engine/llm/execution_context.py +26 -0
  65. devflow_engine/llm/invoke.py +1322 -0
  66. devflow_engine/llm/provider_api.py +304 -0
  67. devflow_engine/llm/repo_knowledge.py +588 -0
  68. devflow_engine/llm_primitives.py +315 -0
  69. devflow_engine/orchestration.py +62 -0
  70. devflow_engine/planning/__init__.py +0 -0
  71. devflow_engine/planning/analyze_repo.py +92 -0
  72. devflow_engine/planning/render_drafts.py +133 -0
  73. devflow_engine/playground/__init__.py +0 -0
  74. devflow_engine/playground/hooks.py +26 -0
  75. devflow_engine/playwright_workflow/__init__.py +5 -0
  76. devflow_engine/playwright_workflow/dag.py +1317 -0
  77. devflow_engine/process/__init__.py +5 -0
  78. devflow_engine/process/dag.py +59 -0
  79. devflow_engine/project_registration/__init__.py +3 -0
  80. devflow_engine/project_registration/dag.py +1581 -0
  81. devflow_engine/project_registry.py +109 -0
  82. devflow_engine/prompts/devin/generic/prompt.md +6 -0
  83. devflow_engine/prompts/devin/ideation/prompt.md +263 -0
  84. devflow_engine/prompts/devin/ideation/scenarios.md +5 -0
  85. devflow_engine/prompts/devin/ideation_loop/prompt.md +6 -0
  86. devflow_engine/prompts/devin/insight/prompt.md +11 -0
  87. devflow_engine/prompts/devin/insight/scenarios.md +5 -0
  88. devflow_engine/prompts/devin/intake/prompt.md +15 -0
  89. devflow_engine/prompts/devin/iterate/prompt.md +12 -0
  90. devflow_engine/prompts/devin/shared/eval_doctrine.md +9 -0
  91. devflow_engine/prompts/devin/shared/principles.md +246 -0
  92. devflow_engine/prompts/devin_eval/assessment/prompt.md +18 -0
  93. devflow_engine/prompts/idea/api_ideation_agent/prompt.md +8 -0
  94. devflow_engine/prompts/idea/api_insight_agent/prompt.md +8 -0
  95. devflow_engine/prompts/idea/response_doctrine/prompt.md +18 -0
  96. devflow_engine/prompts/implementation/dependency_assessment/prompt.md +12 -0
  97. devflow_engine/prompts/implementation/green/green/prompt.md +11 -0
  98. devflow_engine/prompts/implementation/green/node_config/prompt.md +3 -0
  99. devflow_engine/prompts/implementation/green_review/outcome_review/prompt.md +5 -0
  100. devflow_engine/prompts/implementation/green_review/prior_run_review/prompt.md +5 -0
  101. devflow_engine/prompts/implementation/red/prompt.md +27 -0
  102. devflow_engine/prompts/implementation/redreview/prompt.md +23 -0
  103. devflow_engine/prompts/implementation/redreview_repair/prompt.md +16 -0
  104. devflow_engine/prompts/implementation/setupdoc/prompt.md +10 -0
  105. devflow_engine/prompts/implementation/story_planning/prompt.md +13 -0
  106. devflow_engine/prompts/implementation/test_design/prompt.md +27 -0
  107. devflow_engine/prompts/integration/README.md +185 -0
  108. devflow_engine/prompts/integration/green/example.md +67 -0
  109. devflow_engine/prompts/integration/green/green/prompt.md +10 -0
  110. devflow_engine/prompts/integration/green/node_config/prompt.md +42 -0
  111. devflow_engine/prompts/integration/green/past_prompts/20260417T212300/green/prompt.md +15 -0
  112. devflow_engine/prompts/integration/green/past_prompts/20260417T212300/node_config/prompt.md +42 -0
  113. devflow_engine/prompts/integration/green_enrich/example.md +79 -0
  114. devflow_engine/prompts/integration/green_enrich/green_enrich/prompt.md +9 -0
  115. devflow_engine/prompts/integration/green_enrich/node_config/prompt.md +41 -0
  116. devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/green_enrich/prompt.md +14 -0
  117. devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/node_config/prompt.md +41 -0
  118. devflow_engine/prompts/integration/red/code_repair/prompt.md +12 -0
  119. devflow_engine/prompts/integration/red/example.md +152 -0
  120. devflow_engine/prompts/integration/red/node_config/prompt.md +86 -0
  121. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
  122. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/node_config/prompt.md +84 -0
  123. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red/prompt.md +16 -0
  124. devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red_repair/prompt.md +15 -0
  125. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
  126. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/node_config/prompt.md +84 -0
  127. devflow_engine/prompts/integration/red/past_prompts/20260417T215032/red_repair/prompt.md +11 -0
  128. devflow_engine/prompts/integration/red/red/prompt.md +11 -0
  129. devflow_engine/prompts/integration/red/red_repair/prompt.md +12 -0
  130. devflow_engine/prompts/integration/red_review/example.md +71 -0
  131. devflow_engine/prompts/integration/red_review/node_config/prompt.md +41 -0
  132. devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/node_config/prompt.md +41 -0
  133. devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/red_review/prompt.md +15 -0
  134. devflow_engine/prompts/integration/red_review/red_review/prompt.md +9 -0
  135. devflow_engine/prompts/integration/resolve/example.md +111 -0
  136. devflow_engine/prompts/integration/resolve/node_config/prompt.md +64 -0
  137. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/node_config/prompt.md +64 -0
  138. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_implicated_users/prompt.md +15 -0
  139. devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_side_effects/prompt.md +15 -0
  140. devflow_engine/prompts/integration/resolve/resolve_implicated_users/prompt.md +10 -0
  141. devflow_engine/prompts/integration/resolve/resolve_side_effects/prompt.md +10 -0
  142. devflow_engine/prompts/integration/validate/build_idea_acceptance_coverage/prompt.md +12 -0
  143. devflow_engine/prompts/integration/validate/code_repair/prompt.md +13 -0
  144. devflow_engine/prompts/integration/validate/example.md +143 -0
  145. devflow_engine/prompts/integration/validate/node_config/prompt.md +87 -0
  146. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
  147. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/node_config/prompt.md +67 -0
  148. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_enrich_gate/prompt.md +17 -0
  149. devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_repair/prompt.md +16 -0
  150. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
  151. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/node_config/prompt.md +67 -0
  152. devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/validate_repair/prompt.md +9 -0
  153. devflow_engine/prompts/integration/validate/validate_enrich_gate/prompt.md +10 -0
  154. devflow_engine/prompts/integration/validate/validate_repair/prompt.md +20 -0
  155. devflow_engine/prompts/integration/write_workflows/example.md +100 -0
  156. devflow_engine/prompts/integration/write_workflows/node_config/prompt.md +44 -0
  157. devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/node_config/prompt.md +44 -0
  158. devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/write_workflows/prompt.md +17 -0
  159. devflow_engine/prompts/integration/write_workflows/write_workflows/prompt.md +11 -0
  160. devflow_engine/prompts/iterate/README.md +7 -0
  161. devflow_engine/prompts/iterate/coder/prompt.md +11 -0
  162. devflow_engine/prompts/iterate/framer/prompt.md +11 -0
  163. devflow_engine/prompts/iterate/iterator/prompt.md +13 -0
  164. devflow_engine/prompts/iterate/observer/prompt.md +11 -0
  165. devflow_engine/prompts/recovery/diagnosis/prompt.md +7 -0
  166. devflow_engine/prompts/recovery/execution/prompt.md +8 -0
  167. devflow_engine/prompts/recovery/execution_verification/prompt.md +7 -0
  168. devflow_engine/prompts/recovery/failure_investigation/prompt.md +10 -0
  169. devflow_engine/prompts/recovery/preflight_health_repo_repair/prompt.md +8 -0
  170. devflow_engine/prompts/recovery/remediation_execution/prompt.md +11 -0
  171. devflow_engine/prompts/recovery/root_cause_investigation/prompt.md +12 -0
  172. devflow_engine/prompts/scope_idea/doctrine/prompt.md +7 -0
  173. devflow_engine/prompts/source_doc_eval/document/prompt.md +6 -0
  174. devflow_engine/prompts/source_doc_eval/targeted_mutation/prompt.md +9 -0
  175. devflow_engine/prompts/source_doc_mutation/domain_entities/prompt.md +6 -0
  176. devflow_engine/prompts/source_doc_mutation/product_brief/prompt.md +6 -0
  177. devflow_engine/prompts/source_doc_mutation/project_doc_coherence/prompt.md +7 -0
  178. devflow_engine/prompts/source_doc_mutation/project_doc_render/prompt.md +9 -0
  179. devflow_engine/prompts/source_doc_mutation/source_doc_coherence/prompt.md +5 -0
  180. devflow_engine/prompts/source_doc_mutation/source_doc_enrichment_coherence/prompt.md +6 -0
  181. devflow_engine/prompts/source_doc_mutation/user_workflows/prompt.md +6 -0
  182. devflow_engine/prompts/source_scope/doctrine/prompt.md +10 -0
  183. devflow_engine/prompts/ui_grounding/doctrine/prompt.md +7 -0
  184. devflow_engine/recovery/__init__.py +3 -0
  185. devflow_engine/recovery/dag.py +2609 -0
  186. devflow_engine/recovery/models.py +220 -0
  187. devflow_engine/refactor.py +93 -0
  188. devflow_engine/registry/__init__.py +1 -0
  189. devflow_engine/registry/cards.py +238 -0
  190. devflow_engine/registry/domain_normalize.py +60 -0
  191. devflow_engine/registry/effects.py +65 -0
  192. devflow_engine/registry/enforce_report.py +150 -0
  193. devflow_engine/registry/module_cards_classify.py +164 -0
  194. devflow_engine/registry/module_cards_draft.py +184 -0
  195. devflow_engine/registry/module_cards_gate.py +59 -0
  196. devflow_engine/registry/packages.py +347 -0
  197. devflow_engine/registry/pathways.py +323 -0
  198. devflow_engine/review/__init__.py +11 -0
  199. devflow_engine/review/dag.py +588 -0
  200. devflow_engine/review/review_story.py +67 -0
  201. devflow_engine/scope_idea/__init__.py +3 -0
  202. devflow_engine/scope_idea/agentic.py +39 -0
  203. devflow_engine/scope_idea/dag.py +1069 -0
  204. devflow_engine/scope_idea/models.py +175 -0
  205. devflow_engine/skills/builtins/devflow/queue_failure_investigation/SKILL.md +112 -0
  206. devflow_engine/skills/builtins/devflow/queue_idea_to_story/SKILL.md +120 -0
  207. devflow_engine/skills/builtins/devflow/queue_integration/SKILL.md +105 -0
  208. devflow_engine/skills/builtins/devflow/queue_recovery/SKILL.md +108 -0
  209. devflow_engine/skills/builtins/devflow/queue_runtime_core/SKILL.md +155 -0
  210. devflow_engine/skills/builtins/devflow/queue_story_implementation/SKILL.md +122 -0
  211. devflow_engine/skills/builtins/devin/idea_to_story_handoff/SKILL.md +120 -0
  212. devflow_engine/skills/builtins/devin/ideation/SKILL.md +168 -0
  213. devflow_engine/skills/builtins/devin/ideation/state-and-phrasing-reference.md +18 -0
  214. devflow_engine/skills/builtins/devin/insight/SKILL.md +22 -0
  215. devflow_engine/skills/registry.example.yaml +42 -0
  216. devflow_engine/source_doc_assumptions.py +291 -0
  217. devflow_engine/source_doc_mutation_dag.py +1606 -0
  218. devflow_engine/source_doc_mutation_eval.py +417 -0
  219. devflow_engine/source_doc_mutation_worker.py +25 -0
  220. devflow_engine/source_docs_schema.py +207 -0
  221. devflow_engine/source_docs_updater.py +309 -0
  222. devflow_engine/source_scope/__init__.py +15 -0
  223. devflow_engine/source_scope/agentic.py +45 -0
  224. devflow_engine/source_scope/dag.py +1626 -0
  225. devflow_engine/source_scope/models.py +177 -0
  226. devflow_engine/stores/__init__.py +0 -0
  227. devflow_engine/stores/execution_store.py +3534 -0
  228. devflow_engine/story/__init__.py +0 -0
  229. devflow_engine/story/contracts.py +160 -0
  230. devflow_engine/story/discovery.py +47 -0
  231. devflow_engine/story/evidence.py +118 -0
  232. devflow_engine/story/hashing.py +27 -0
  233. devflow_engine/story/implemented_queue_purge.py +148 -0
  234. devflow_engine/story/indexer.py +105 -0
  235. devflow_engine/story/io.py +20 -0
  236. devflow_engine/story/markdown_contracts.py +298 -0
  237. devflow_engine/story/reconciliation.py +408 -0
  238. devflow_engine/story/validate_stories.py +149 -0
  239. devflow_engine/story/validate_tests_story.py +512 -0
  240. devflow_engine/story/validation.py +133 -0
  241. devflow_engine/ui_grounding/__init__.py +11 -0
  242. devflow_engine/ui_grounding/agentic.py +31 -0
  243. devflow_engine/ui_grounding/dag.py +874 -0
  244. devflow_engine/ui_grounding/models.py +224 -0
  245. devflow_engine/ui_grounding/pencil_bridge.py +247 -0
  246. devflow_engine/vendor/__init__.py +0 -0
  247. devflow_engine/vendor/datalumina_genai/__init__.py +11 -0
  248. devflow_engine/vendor/datalumina_genai/core/__init__.py +0 -0
  249. devflow_engine/vendor/datalumina_genai/core/exceptions.py +9 -0
  250. devflow_engine/vendor/datalumina_genai/core/nodes/__init__.py +0 -0
  251. devflow_engine/vendor/datalumina_genai/core/nodes/agent.py +48 -0
  252. devflow_engine/vendor/datalumina_genai/core/nodes/agent_streaming_node.py +26 -0
  253. devflow_engine/vendor/datalumina_genai/core/nodes/base.py +89 -0
  254. devflow_engine/vendor/datalumina_genai/core/nodes/concurrent.py +30 -0
  255. devflow_engine/vendor/datalumina_genai/core/nodes/router.py +69 -0
  256. devflow_engine/vendor/datalumina_genai/core/schema.py +72 -0
  257. devflow_engine/vendor/datalumina_genai/core/task.py +52 -0
  258. devflow_engine/vendor/datalumina_genai/core/validate.py +139 -0
  259. devflow_engine/vendor/datalumina_genai/core/workflow.py +200 -0
  260. devflow_engine/worker.py +1086 -0
  261. devflow_engine/worker_guard.py +233 -0
  262. devflow_engine-1.0.0.dist-info/METADATA +235 -0
  263. devflow_engine-1.0.0.dist-info/RECORD +393 -0
  264. devflow_engine-1.0.0.dist-info/WHEEL +4 -0
  265. devflow_engine-1.0.0.dist-info/entry_points.txt +3 -0
  266. devin/__init__.py +6 -0
  267. devin/dag.py +58 -0
  268. devin/dag_two_arm.py +138 -0
  269. devin/devin_chat_scenario_catalog.json +588 -0
  270. devin/devin_eval.py +677 -0
  271. devin/nodes/__init__.py +0 -0
  272. devin/nodes/ideation/__init__.py +0 -0
  273. devin/nodes/ideation/node.py +195 -0
  274. devin/nodes/ideation/playground.py +267 -0
  275. devin/nodes/ideation/prompt.md +65 -0
  276. devin/nodes/ideation/scenarios/continue_refinement.py +13 -0
  277. devin/nodes/ideation/scenarios/continue_refinement_evals.py +18 -0
  278. devin/nodes/ideation/scenarios/idea_fits_existing_patterns.py +17 -0
  279. devin/nodes/ideation/scenarios/idea_fits_existing_patterns_evals.py +16 -0
  280. devin/nodes/ideation/scenarios/large_idea_split.py +4 -0
  281. devin/nodes/ideation/scenarios/large_idea_split_evals.py +17 -0
  282. devin/nodes/ideation/scenarios/source_documentation_added.py +4 -0
  283. devin/nodes/ideation/scenarios/source_documentation_added_evals.py +16 -0
  284. devin/nodes/ideation/scenarios/user_says_create_it.py +30 -0
  285. devin/nodes/ideation/scenarios/user_says_create_it_evals.py +23 -0
  286. devin/nodes/ideation/scenarios/vague_idea.py +16 -0
  287. devin/nodes/ideation/scenarios/vague_idea_evals.py +47 -0
  288. devin/nodes/ideation/tools.json +312 -0
  289. devin/nodes/insight/__init__.py +0 -0
  290. devin/nodes/insight/node.py +49 -0
  291. devin/nodes/insight/playground.py +154 -0
  292. devin/nodes/insight/prompt.md +61 -0
  293. devin/nodes/insight/scenarios/architecture_pattern_query.py +15 -0
  294. devin/nodes/insight/scenarios/architecture_pattern_query_evals.py +25 -0
  295. devin/nodes/insight/scenarios/codebase_exploration.py +15 -0
  296. devin/nodes/insight/scenarios/codebase_exploration_evals.py +23 -0
  297. devin/nodes/insight/scenarios/devin_ideation_routing.py +19 -0
  298. devin/nodes/insight/scenarios/devin_ideation_routing_evals.py +39 -0
  299. devin/nodes/insight/scenarios/devin_insight_routing.py +20 -0
  300. devin/nodes/insight/scenarios/devin_insight_routing_evals.py +40 -0
  301. devin/nodes/insight/scenarios/operational_debugging.py +15 -0
  302. devin/nodes/insight/scenarios/operational_debugging_evals.py +23 -0
  303. devin/nodes/insight/scenarios/operational_question.py +9 -0
  304. devin/nodes/insight/scenarios/operational_question_evals.py +8 -0
  305. devin/nodes/insight/scenarios/queue_status.py +15 -0
  306. devin/nodes/insight/scenarios/queue_status_evals.py +23 -0
  307. devin/nodes/insight/scenarios/source_doc_explanation.py +14 -0
  308. devin/nodes/insight/scenarios/source_doc_explanation_evals.py +21 -0
  309. devin/nodes/insight/scenarios/worker_state_check.py +15 -0
  310. devin/nodes/insight/scenarios/worker_state_check_evals.py +22 -0
  311. devin/nodes/insight/tools.json +126 -0
  312. devin/nodes/intake/__init__.py +0 -0
  313. devin/nodes/intake/node.py +27 -0
  314. devin/nodes/intake/playground.py +47 -0
  315. devin/nodes/intake/prompt.md +12 -0
  316. devin/nodes/intake/scenarios/ideation_routing.py +4 -0
  317. devin/nodes/intake/scenarios/ideation_routing_evals.py +5 -0
  318. devin/nodes/intake/scenarios/insight_routing.py +4 -0
  319. devin/nodes/intake/scenarios/insight_routing_evals.py +5 -0
  320. devin/nodes/iterate/README.md +44 -0
  321. devin/nodes/iterate/__init__.py +1 -0
  322. devin/nodes/iterate/_archived_design_stages/01-objectives-requirements.md +112 -0
  323. devin/nodes/iterate/_archived_design_stages/02-evals.md +131 -0
  324. devin/nodes/iterate/_archived_design_stages/03-tools-and-boundaries.md +110 -0
  325. devin/nodes/iterate/_archived_design_stages/04-harness-and-playground.md +32 -0
  326. devin/nodes/iterate/_archived_design_stages/05-prompt-deferred.md +11 -0
  327. devin/nodes/iterate/_archived_design_stages/coder_agent_design/01-objectives-requirements.md +20 -0
  328. devin/nodes/iterate/_archived_design_stages/coder_agent_design/02-evals.md +8 -0
  329. devin/nodes/iterate/_archived_design_stages/coder_agent_design/03-tools-and-boundaries.md +14 -0
  330. devin/nodes/iterate/_archived_design_stages/coder_agent_design/04-harness-and-playground.md +12 -0
  331. devin/nodes/iterate/_archived_design_stages/framer_agent_design/01-objectives-requirements.md +20 -0
  332. devin/nodes/iterate/_archived_design_stages/framer_agent_design/02-evals.md +8 -0
  333. devin/nodes/iterate/_archived_design_stages/framer_agent_design/03-tools-and-boundaries.md +13 -0
  334. devin/nodes/iterate/_archived_design_stages/framer_agent_design/04-harness-and-playground.md +12 -0
  335. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/01-objectives-requirements.md +25 -0
  336. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/02-evals.md +9 -0
  337. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/03-tools-and-boundaries.md +14 -0
  338. devin/nodes/iterate/_archived_design_stages/iterator_agent_design/04-harness-and-playground.md +12 -0
  339. devin/nodes/iterate/_archived_design_stages/observer_agent_design/01-objectives-requirements.md +20 -0
  340. devin/nodes/iterate/_archived_design_stages/observer_agent_design/02-evals.md +8 -0
  341. devin/nodes/iterate/_archived_design_stages/observer_agent_design/03-tools-and-boundaries.md +14 -0
  342. devin/nodes/iterate/_archived_design_stages/observer_agent_design/04-harness-and-playground.md +13 -0
  343. devin/nodes/iterate/agent-roles.md +89 -0
  344. devin/nodes/iterate/agents/README.md +10 -0
  345. devin/nodes/iterate/artifacts.md +504 -0
  346. devin/nodes/iterate/contract.md +100 -0
  347. devin/nodes/iterate/eval-plan.md +74 -0
  348. devin/nodes/iterate/node.py +100 -0
  349. devin/nodes/iterate/pipeline/README.md +13 -0
  350. devin/nodes/iterate/playground-contract.md +76 -0
  351. devin/nodes/iterate/prompt.md +11 -0
  352. devin/nodes/iterate/scenarios/README.md +38 -0
  353. devin/nodes/iterate/scenarios/artifact-and-loop-scenarios.md +101 -0
  354. devin/nodes/iterate/scenarios/coder_artifact_alignment.py +32 -0
  355. devin/nodes/iterate/scenarios/coder_artifact_alignment_evals.py +45 -0
  356. devin/nodes/iterate/scenarios/coder_bounded_fix.py +27 -0
  357. devin/nodes/iterate/scenarios/coder_bounded_fix_evals.py +45 -0
  358. devin/nodes/iterate/scenarios/devin_iterate_routing.py +21 -0
  359. devin/nodes/iterate/scenarios/devin_iterate_routing_evals.py +36 -0
  360. devin/nodes/iterate/scenarios/framer_scope_boundary.py +25 -0
  361. devin/nodes/iterate/scenarios/framer_scope_boundary_evals.py +57 -0
  362. devin/nodes/iterate/scenarios/framer_task_framing.py +25 -0
  363. devin/nodes/iterate/scenarios/framer_task_framing_evals.py +58 -0
  364. devin/nodes/iterate/scenarios/iterate_error_fix.py +21 -0
  365. devin/nodes/iterate/scenarios/iterate_error_fix_evals.py +39 -0
  366. devin/nodes/iterate/scenarios/iterate_quick_change.py +21 -0
  367. devin/nodes/iterate/scenarios/iterate_quick_change_evals.py +35 -0
  368. devin/nodes/iterate/scenarios/iterate_to_idea_promotion.py +23 -0
  369. devin/nodes/iterate/scenarios/iterate_to_idea_promotion_evals.py +53 -0
  370. devin/nodes/iterate/scenarios/iterate_to_insight_reroute.py +23 -0
  371. devin/nodes/iterate/scenarios/iterate_to_insight_reroute_evals.py +53 -0
  372. devin/nodes/iterate/scenarios/observer_evidence_seam.py +28 -0
  373. devin/nodes/iterate/scenarios/observer_evidence_seam_evals.py +55 -0
  374. devin/nodes/iterate/scenarios/observer_repro_creation.py +28 -0
  375. devin/nodes/iterate/scenarios/observer_repro_creation_evals.py +45 -0
  376. devin/nodes/iterate/scenarios/routing-matrix.md +45 -0
  377. devin/nodes/shared/__init__.py +0 -0
  378. devin/nodes/shared/filemaker_expert.md +80 -0
  379. devin/nodes/shared/filemaker_expert.py +354 -0
  380. devin/nodes/shared/filemaker_expert_eval/runner.py +176 -0
  381. devin/nodes/shared/filemaker_expert_eval/scenarios.json +65 -0
  382. devin/nodes/shared/goldilocks_advisor_eval/runner.py +214 -0
  383. devin/nodes/shared/goldilocks_advisor_eval/scenarios.json +58 -0
  384. devin/nodes/shared/helpers.py +156 -0
  385. devin/nodes/shared/idea_compliance_advisor_eval/runner.py +252 -0
  386. devin/nodes/shared/idea_compliance_advisor_eval/scenarios.json +75 -0
  387. devin/nodes/shared/models.py +44 -0
  388. devin/nodes/shared/post.py +40 -0
  389. devin/nodes/shared/router.py +107 -0
  390. devin/nodes/shared/tools.py +191 -0
  391. devin/shared/devin-chat-rubric.md +237 -0
  392. devin/shared/devin-chat-scenario-suite.md +90 -0
  393. devin/shared/eval_doctrine.md +9 -0
@@ -0,0 +1,1581 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import os
5
+ import re
6
+ import sqlite3
7
+ import subprocess
8
+ from dataclasses import dataclass
9
+ from datetime import UTC, datetime
10
+ from hashlib import sha256
11
+ from pathlib import Path
12
+ from typing import Any, Callable
13
+ from urllib.parse import quote
14
+ from urllib.request import Request, urlopen
15
+
16
+ from pydantic import BaseModel
17
+
18
+ from ..stores.execution_store import ExecutionStore
19
+ from ..source_docs_updater import ensure_source_doc_scaffold
20
+ from ..devflow_state import publish_devflow_state
21
+ from ..vendor.datalumina_genai.core.nodes.base import Node
22
+ from ..vendor.datalumina_genai.core.schema import NodeConfig, WorkflowSchema
23
+ from ..vendor.datalumina_genai.core.task import TaskContext
24
+ from ..vendor.datalumina_genai.core.workflow import Workflow
25
+
26
+
27
# Stable identifier for this workflow's DAG; embedded in artifact payloads
# and pipeline directory paths.
DAG_ID = "project_registration_dag"

# Callback invoked with a repository path to register it with the engine.
RegistrationCallback = Callable[[Path], None]

# Runtime context injected by the DAG runner before execution; read back
# through _store_run() and _registration_callback(), which raise when unset.
_CURRENT_STORE: ExecutionStore | None = None
_CURRENT_RUN_ID: str | None = None
_CURRENT_REGISTER_REPO: RegistrationCallback | None = None
34
+
35
+
36
@dataclass(frozen=True)
class ProjectRegistrationDagResult:
    """Immutable outcome of one project-registration DAG run.

    Bundles the identifiers, filesystem locations, registry entry, and
    state-history trail produced by the run, plus a human-readable message.
    """

    # Process-style exit status for the run — TODO confirm 0 means success.
    exit_code: int
    run_id: str
    project_id: str
    # Final registration lifecycle state reached by the run.
    registration_state: str
    pipeline_dir: Path
    repo_root: Path
    workspace_path: Path
    # Entry written to the projects registry (JSON-compatible mapping).
    registry_entry: dict[str, object]
    # Ordered list of {state, timestamp} records for the run.
    state_history: list[dict[str, str]]
    message: str
48
+
49
+
50
class StateHistoryRecord(BaseModel):
    """One entry in a registration state-history trail."""

    # Registration lifecycle state name at this point.
    state: str
    # When the state was recorded (string; presumably ISO-8601 — confirm).
    timestamp: str
53
+
54
+
55
class ProjectRegistrationDagEvent(BaseModel):
    """Inbound event payload that drives the project-registration DAG."""

    repo_root: str
    workspace_path: str
    # Git remote URL when the workspace has one; None for local-only repos.
    remote_url: str | None = None
    project_name: str | None = None
    # CLI command that initiated registration.
    command_name: str
    project_id: str
    pipeline_key: str
    # Event timestamp (string; presumably ISO-8601 — confirm).
    occurred_at: str
64
+
65
+
66
class ProjectShellArtifact(BaseModel):
    """Artifact describing the project "shell": identity plus workspace paths."""

    dag_id: str
    project_id: str
    # Inferred owner/repo pair (see _infer_owner_repo for derivation).
    owner: str
    repo: str
    repo_root: str
    workspace_path: str
    remote_url: str | None = None
    # Command that performed the registration.
    registration_command: str
    registration_state: str
    state_history: list[StateHistoryRecord]
    created_at: str
    updated_at: str
79
+
80
+
81
class BindingRequestArtifact(BaseModel):
    """Artifact requesting that a project be bound into the registry.

    The ``*_ref`` fields hold references to sibling artifacts — presumably
    store keys or paths; confirm against the execution store.
    """

    dag_id: str
    project_id: str
    registration_state: str
    state_history: list[StateHistoryRecord]
    project_shell_ref: str
    state_history_ref: str
    registry_entry_ref: str
    created_at: str
    updated_at: str
91
+
92
+
93
class EngineRegistrationArtifact(BaseModel):
    """Artifact recording the engine-side registration attempt and its result."""

    dag_id: str
    project_id: str
    registration_state: str
    command_name: str
    repo_root: str
    workspace_path: str
    remote_url: str | None = None
    state_history: list[StateHistoryRecord]
    # Whether engine registration succeeded; error_message carries the
    # failure detail when it did not.
    ok: bool
    error_message: str | None = None
    created_at: str
    updated_at: str
106
+
107
+
108
class ProjectRegistrationSummary(BaseModel):
    """Compact run summary: exit status, run identity, and outcome payload."""

    exit_code: int
    run_id: str
    pipeline_dir: str
    message: str
    # Free-form outcome details; shape not constrained here.
    outcome: dict[str, Any]
114
+
115
+
116
class ProjectRegistrationArtifact(BaseModel):
    """Top-level artifact tying the registration run to its sub-artifacts.

    The ``*_ref`` fields reference the shell, state-history, engine
    registration, and summary artifacts — presumably store keys or paths;
    confirm against the execution store.
    """

    dag_id: str
    run_id: str
    project_id: str
    command_name: str
    repo_root: str
    workspace_path: str
    remote_url: str | None = None
    registration_state: str
    state_history: list[StateHistoryRecord]
    project_shell_ref: str
    state_history_ref: str
    engine_registration_ref: str
    summary_ref: str
    created_at: str
    updated_at: str
132
+
133
+
134
def _store_run() -> tuple[ExecutionStore, str]:
    """Return the active (store, run_id) pair, failing loudly when unset."""
    store, run_id = _CURRENT_STORE, _CURRENT_RUN_ID
    if store is None or run_id is None:
        raise RuntimeError("project registration dag missing runtime store/run_id")
    return store, run_id
138
+
139
+
140
def _registration_callback() -> RegistrationCallback:
    """Return the repo-registration callback installed by the runner."""
    callback = _CURRENT_REGISTER_REPO
    if callback is None:
        raise RuntimeError("project registration dag missing repo registration callback")
    return callback
144
+
145
+
146
+ def _devflow_home() -> Path:
147
+ import os
148
+
149
+ base = os.environ.get("DEVFLOW_HOME") or os.environ.get("HOME")
150
+ if base:
151
+ return Path(base).expanduser().resolve()
152
+ return Path.home().resolve()
153
+
154
+
155
def _projects_registry_path() -> Path:
    """Location of the on-disk projects registry JSON file."""
    home = _devflow_home()
    return home / ".devflow" / "registry" / "projects.json"
157
+
158
+
159
def _read_projects_registry() -> dict[str, object]:
    """Load the projects registry, normalising to the v1 shape.

    Best-effort: any read/parse failure or unexpected document shape
    degrades to an empty registry instead of raising.
    """
    try:
        raw = json.loads(_projects_registry_path().read_text(encoding="utf-8"))
    except Exception:
        raw = None
    projects = raw.get("projects") if isinstance(raw, dict) else None
    if not isinstance(projects, list):
        projects = []
    return {"schema_version": 1, "projects": projects}
171
+
172
+
173
def _write_projects_registry(registry: dict[str, object]) -> None:
    """Persist *registry* as pretty-printed, key-sorted JSON (trailing newline)."""
    target = _projects_registry_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(registry, indent=2, sort_keys=True) + "\n"
    target.write_text(payload, encoding="utf-8")
177
+
178
+
179
+ def _workspace_hash(remote_url: str | None, repo_root: Path) -> str:
180
+ ident = (remote_url or f"local:{repo_root.resolve()}").strip().lower()
181
+ return sha256(ident.encode("utf-8")).hexdigest()[:8]
182
+
183
+
184
+ def _infer_owner_repo(remote_url: str | None, repo_root: Path, project_name: str | None) -> tuple[str, str]:
185
+ if remote_url:
186
+ match = re.search(r"[:/](?P<owner>[^/]+)/(?P<repo>[^/]+?)(?:\.git)?$", remote_url)
187
+ if match:
188
+ return match.group("owner").lower(), match.group("repo").lower()
189
+ repo = (project_name or repo_root.name).strip().lower() or "project"
190
+ repo = re.sub(r"[^a-z0-9._-]+", "-", repo).strip("-") or "project"
191
+ return "local", repo
192
+
193
+
194
def _stable_project_identity(*, repo_root: Path, remote_url: str | None, project_name: str | None) -> tuple[str, str, str]:
    """Return (owner, repo, project_id) where project_id is "proj_" + workspace hash."""
    owner, repo = _infer_owner_repo(remote_url, repo_root, project_name)
    return owner, repo, f"proj_{_workspace_hash(remote_url, repo_root)}"
198
+
199
+
200
def _pipeline_root(repo_root: Path, project_id: str) -> Path:
    """Directory holding the current registration-pipeline artifacts for a project."""
    project_dir = repo_root / ".devflow" / "projects" / project_id
    return project_dir / "pipelines" / DAG_ID / "current"
202
+
203
+
204
+ # ---------------------------------------------------------------------------
205
+ # Supabase project UUID helpers
206
+ # ---------------------------------------------------------------------------
207
+
208
# Canonical UUID shape: 8-4-4-4-12 lowercase hex groups. Callers lowercase the
# input before matching (see _is_uuid_like), so uppercase UUIDs are accepted too.
_UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
)
211
+
212
+
213
def _is_uuid_like(value: str) -> bool:
    """Return True if value looks like a UUID (8-4-4-4-12 hex), case-insensitively."""
    return _UUID_RE.match(value.lower()) is not None
216
+
217
+
218
+ def _resolve_supabase_config_for_registration() -> tuple[str, str] | None:
219
+ """Return (url, key) if Supabase is configured, else None.
220
+
221
+ Skipped during pytest runs to preserve test isolation.
222
+ """
223
+ if os.environ.get("PYTEST_CURRENT_TEST"):
224
+ return None
225
+ url = (
226
+ os.environ.get("DEVFLOW_SUPABASE_URL")
227
+ or os.environ.get("SUPABASE_URL")
228
+ )
229
+ key = (
230
+ os.environ.get("DEVFLOW_SUPABASE_SERVICE_KEY")
231
+ or os.environ.get("SUPABASE_SERVICE_ROLE_KEY")
232
+ or os.environ.get("SUPABASE_SERVICE_KEY")
233
+ )
234
+ if not url or not key:
235
+ try:
236
+ from ..devflow_state import _keychain_get # type: ignore
237
+ url = url or _keychain_get("Supabase URL", "Clarity")
238
+ key = key or _keychain_get("Supabase Service Key", "Clarity")
239
+ except Exception:
240
+ pass
241
+ if not url or not key:
242
+ return None
243
+ return url.rstrip("/"), key
244
+
245
+
246
def _supabase_request(
    *,
    method: str,
    url: str,
    key: str,
    body: Any | None = None,
    prefer: str | None = None,
) -> Any:
    """Issue an authenticated Supabase PostgREST request; return the decoded JSON.

    Returns None for an empty response body. Network/HTTP errors propagate to
    the caller. Content-Type and Prefer headers are only attached when needed.
    """
    data = json.dumps(body).encode("utf-8") if body is not None else None
    request = Request(url, data=data, method=method)
    headers = {
        "apikey": key,
        "Authorization": f"Bearer {key}",
    }
    if body is not None:
        headers["Content-Type"] = "application/json"
    if prefer:
        headers["Prefer"] = prefer
    for name, value in headers.items():
        request.add_header(name, value)
    with urlopen(request, timeout=30) as response:
        text = response.read().decode("utf-8")
    return json.loads(text) if text else None
265
+
266
+
267
def _lookup_supabase_project_uuid(
    *,
    url: str,
    key: str,
    repo_root: Path,
    remote_url: str | None,
    owner: str | None = None,
    repo: str | None = None,
) -> str | None:
    """Return Supabase devflow_projects.id UUID for this project, or None.

    Lookup order (most → least specific):
    1. repo_url (case-insensitive ilike match)
    2. devflow_repo_root (exact match — set when the project was first registered
       from a local engine run; may be blank on UI-created rows)
    3. repo_name (ilike match on the GitHub repo slug — reliable fallback for
       rows created through the Supabase UI or API that never ran local import)
    """

    def _first_uuid(filter_expr: str) -> str | None:
        # Single-row id lookup; only UUID-shaped ids count as a hit.
        rows = _supabase_request(
            method="GET",
            url=f"{url}/rest/v1/devflow_projects?select=id&{filter_expr}&limit=1",
            key=key,
        )
        if isinstance(rows, list) and rows:
            candidate_id = str((rows[0] or {}).get("id") or "").strip()
            if _is_uuid_like(candidate_id):
                return candidate_id
        return None

    # 1. Try by remote_url first (most specific). Build deduplicated variants:
    # as-given, lowercased, and each with the ".git" suffix / trailing slash removed.
    if remote_url:
        stripped = remote_url.strip()
        candidates: list[str] = []
        for value in (stripped, stripped.lower()):
            if value and value not in candidates:
                candidates.append(value)
            without_git = re.sub(r"\.git$", "", value, flags=re.IGNORECASE).rstrip("/")
            if without_git and without_git not in candidates:
                candidates.append(without_git)
        for candidate in candidates:
            found = _first_uuid(f"repo_url=ilike.{quote(candidate)}")
            if found:
                return found

    # 2. Try by devflow_repo_root (exact match).
    resolved_root = str(repo_root.expanduser().resolve())
    found = _first_uuid(f"devflow_repo_root=eq.{quote(resolved_root)}")
    if found:
        return found

    # 3. Fallback: repo_name slug (ilike). Catches rows created via the UI/API
    # before any local import ran, where devflow_repo_root is blank and
    # repo_url may differ in format (missing .git suffix, different casing).
    if repo:
        return _first_uuid(f"repo_name=ilike.{quote(repo)}")

    return None
334
+
335
+
336
def _create_supabase_project_row(
    *,
    url: str,
    key: str,
    repo_root: Path,
    remote_url: str | None,
    project_name: str | None,
    owner: str,
    repo: str,
) -> str:
    """Create a new devflow_projects row and return its UUID.

    Only sets columns that are available during local registration; optional
    columns are added only when a meaningful value exists.
    Raises RuntimeError if the insert fails or returns a non-UUID id.
    """
    resolved_root = str(repo_root.expanduser().resolve())
    row: dict[str, Any] = {
        "name": project_name or repo or resolved_root.split("/")[-1],
        "devflow_repo_root": resolved_root,
        "status": "draft_unbound",
        "metadata": {
            "created_from": "devflow-engine",
            "registration_mode": "local",
        },
    }
    if remote_url:
        row["repo_url"] = remote_url
    # "local" is the placeholder owner for remoteless repos — don't persist it.
    if owner and owner != "local":
        row["repo_owner"] = owner
    if repo:
        row["repo_name"] = repo

    inserted = _supabase_request(
        method="POST",
        url=f"{url}/rest/v1/devflow_projects",
        key=key,
        body=row,
        prefer="return=representation",
    )
    if not isinstance(inserted, list) or not inserted:
        raise RuntimeError("Failed to create devflow_projects row in Supabase")
    new_id = str((inserted[0] or {}).get("id") or "").strip()
    if not _is_uuid_like(new_id):
        raise RuntimeError(f"Supabase devflow_projects insert returned unexpected id: {new_id!r}")
    return new_id
381
+
382
+
383
def _update_supabase_devflow_project_id(
    *,
    url: str,
    key: str,
    supabase_uuid: str,
) -> None:
    """Update devflow_projects.devflow_project_id = supabase_uuid for the row."""
    try:
        _supabase_request(
            method="PATCH",
            url=f"{url}/rest/v1/devflow_projects?id=eq.{quote(supabase_uuid)}",
            key=key,
            body={"devflow_project_id": supabase_uuid},
        )
    except Exception:
        # Non-fatal: local state is correct, Supabase annotation is best-effort.
        pass
399
+
400
+
401
+ def _detect_repo_default_branch(repo_root: Path) -> str | None:
402
+ commands = [
403
+ ["git", "-C", str(repo_root), "symbolic-ref", "--short", "refs/remotes/origin/HEAD"],
404
+ ["git", "-C", str(repo_root), "symbolic-ref", "--short", "HEAD"],
405
+ ]
406
+ for command in commands:
407
+ try:
408
+ proc = subprocess.run(
409
+ command,
410
+ capture_output=True,
411
+ text=True,
412
+ check=False,
413
+ timeout=10,
414
+ )
415
+ except Exception:
416
+ continue
417
+ if proc.returncode != 0:
418
+ continue
419
+ value = proc.stdout.strip()
420
+ if not value:
421
+ continue
422
+ branch = value.rsplit("/", 1)[-1].strip()
423
+ if branch:
424
+ return branch
425
+ return None
426
+
427
+
428
def _repair_supabase_project_row(
    *,
    url: str,
    key: str,
    supabase_uuid: str,
    repo_root: Path,
    remote_url: str | None,
    project_name: str | None,
    owner: str,
    repo: str,
    registration_status: str,
) -> None:
    """PATCH the existing devflow_projects row with locally-known metadata.

    Always refreshes devflow_repo_root and status; the other columns are only
    written when a local value is available. Errors propagate to the caller.
    """
    resolved_root = str(repo_root.expanduser().resolve())
    updates: dict[str, Any] = {
        "devflow_repo_root": resolved_root,
        "status": registration_status,
    }
    if remote_url:
        updates["repo_url"] = remote_url
    if project_name:
        updates["name"] = project_name
    # "local" is the placeholder owner for remoteless repos — don't persist it.
    if owner and owner != "local":
        updates["repo_owner"] = owner
    if repo:
        updates["repo_name"] = repo
    default_branch = _detect_repo_default_branch(repo_root)
    if default_branch:
        updates["repo_default_branch"] = default_branch
    _supabase_request(
        method="PATCH",
        url=f"{url}/rest/v1/devflow_projects?id=eq.{quote(supabase_uuid)}",
        key=key,
        body=updates,
    )
462
+
463
+
464
def _publish_devflow_state_for_registration(
    *,
    project_id: str,
    run_id: str,
) -> None:
    """Publish a "project_registered" state row; no-op when Supabase is unconfigured."""
    if _resolve_supabase_config_for_registration() is None:
        return
    publish_devflow_state(
        project_id=project_id,
        run_id=run_id,
        current_state="project_registered",
        current_status="idle",
        run_summary="project registration complete",
        display="project",
        display_path=f"project:{project_id}",
    )
481
+
482
+
483
def _resolve_or_create_supabase_project_uuid(
    *,
    repo_root: Path,
    remote_url: str | None,
    project_name: str | None,
    owner: str,
    repo: str,
) -> str | None:
    """Return Supabase UUID for this project (look up or create).

    Returns None when Supabase is not configured (e.g. during tests or
    offline use). In that case the caller should fall back to the hash-based id.

    Raises RuntimeError if Supabase IS configured but the operation fails.
    """
    config = _resolve_supabase_config_for_registration()
    if config is None:
        return None
    url, key = config

    try:
        existing = _lookup_supabase_project_uuid(
            url=url,
            key=key,
            repo_root=repo_root,
            remote_url=remote_url,
            owner=owner,
            repo=repo,
        )
        # Not found → create a new row.
        return existing or _create_supabase_project_row(
            url=url,
            key=key,
            repo_root=repo_root,
            remote_url=remote_url,
            project_name=project_name,
            owner=owner,
            repo=repo,
        )
    except Exception as exc:
        raise RuntimeError(
            f"Supabase project UUID resolution failed for {repo_root}: {exc}"
        ) from exc
529
+
530
+
531
def _read_existing_local_project_uuid(
    *,
    repo_root: Path,
    workspace_path: Path,
    remote_url: str | None,
) -> str | None:
    """Return a UUID-like project_id already stored in the local projects registry
    for this repo, if one exists.

    This prevents `run_project_registration_dag` from asking Supabase to create a
    second devflow_projects row (and therefore a second/drifted project identity)
    when the same repo is imported or re-initialised after it was already
    successfully registered.

    The local registry is the in-process cache of the last Supabase UUID that was
    resolved for this repo. If the stored project_id is already UUID-shaped, it
    IS the authoritative Supabase UUID and must be reused without a new lookup.
    """

    def _uuid_of(item: dict) -> str | None:
        # Only UUID-shaped project ids count; hash-based ids are ignored.
        candidate = str(item.get("project_id") or "").strip()
        return candidate if _is_uuid_like(candidate) else None

    def _same_path(stored: object, resolved: str) -> bool:
        # Resolve-and-compare; unreadable/unresolvable paths never match.
        text = str(stored or "").strip()
        if not text:
            return False
        try:
            return Path(text).expanduser().resolve() == Path(resolved)
        except Exception:
            return False

    resolved_root = str(repo_root.expanduser().resolve())
    resolved_ws = str(workspace_path.expanduser().resolve())
    for item in list(_read_projects_registry().get("projects", [])):
        if not isinstance(item, dict):
            continue
        # Match by remote_url (most reliable), then workspace_path, then repo_root.
        if remote_url and item.get("remote_url") == remote_url:
            found = _uuid_of(item)
            if found:
                return found
        if _same_path(item.get("workspace_path"), resolved_ws) or _same_path(
            item.get("repo_root"), resolved_root
        ):
            found = _uuid_of(item)
            if found:
                return found
    return None
582
+
583
+
584
+ # ---------------------------------------------------------------------------
585
+
586
+
587
+ def _write_json(path: Path, payload: dict[str, object]) -> None:
588
+ path.parent.mkdir(parents=True, exist_ok=True)
589
+ path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8")
590
+
591
+
592
def _jsonable_history(records: list[StateHistoryRecord]) -> list[dict[str, str]]:
    """Convert state-history records into JSON-serialisable dicts."""
    return [record.model_dump() for record in records]
594
+
595
+
596
+ def _find_existing_project_entry(
597
+ *,
598
+ projects: list[object],
599
+ repo_root: Path,
600
+ workspace_path: Path,
601
+ remote_url: str | None,
602
+ ) -> tuple[int | None, dict[str, object] | None]:
603
+ for idx, item in enumerate(projects):
604
+ if not isinstance(item, dict):
605
+ continue
606
+ if remote_url and item.get("remote_url") == remote_url:
607
+ return idx, item
608
+ if Path(str(item.get("workspace_path", ""))).expanduser() == workspace_path:
609
+ return idx, item
610
+ if Path(str(item.get("repo_root", ""))).expanduser() == repo_root:
611
+ return idx, item
612
+ return None, None
613
+
614
+
615
def _upsert_registry_state(
    *,
    entry: dict[str, object],
    found_index: int | None,
    registry: dict[str, object],
    projects: list[object],
    state_history: list[StateHistoryRecord],
    state: str,
    timestamp: str,
) -> tuple[int, dict[str, object]]:
    """Record a registration state transition on the entry and persist the registry.

    The state is appended to the history only on first occurrence; the entry is
    appended to (or replaced in) `projects`, and the registry is written to
    disk. Returns the (possibly new) index of the entry and the entry itself.
    """
    already_seen = any(record.state == state for record in state_history)
    if not already_seen:
        state_history.append(StateHistoryRecord(state=state, timestamp=timestamp))
    entry["registration_state"] = state
    entry["state_history"] = _jsonable_history(state_history)
    entry["updated_at"] = timestamp
    # Milestone states additionally stamp their dedicated timestamp fields.
    if state == "engine_registered":
        entry["registered_at"] = timestamp
    if state == "ready_for_source_scope":
        entry["ready_for_source_scope_at"] = timestamp
    if found_index is None:
        projects.append(entry)
        found_index = len(projects) - 1
    else:
        projects[found_index] = entry
    registry["projects"] = projects
    _write_projects_registry(registry)
    return found_index, entry
643
+
644
+
645
def _write_summary(
    *,
    pipeline_dir: Path,
    exit_code: int,
    run_id: str,
    outcome: dict[str, Any],
    message: str,
) -> Path:
    """Persist the DAG run summary as summary.json in pipeline_dir and return its path."""
    summary_path = pipeline_dir / "summary.json"
    _write_json(
        summary_path,
        ProjectRegistrationSummary(
            exit_code=exit_code,
            run_id=run_id,
            pipeline_dir=str(pipeline_dir),
            message=message,
            outcome=outcome,
        ).model_dump(),
    )
    return summary_path
663
+
664
+
665
def _reconcile_execution_store_project_row(*, repo_root: Path, authoritative_project_id: str) -> None:
    """Force the local execution store's `projects` table to hold exactly one
    row for this repo_root, keyed by the authoritative project id.

    Three cases, in order:
    1. A row with the authoritative id exists → keep the oldest such row,
       delete every other row for this repo_root, and normalise its fields.
    2. Rows exist but none carries the authoritative id → rewrite the oldest
       row in place and delete the rest.
    3. No rows exist → insert a fresh one.

    NOTE(review): sqlite3.connect() creates the database file when missing, in
    which case the statements below would fail on an absent `projects` table —
    presumably the store is initialised before this runs; confirm.
    """
    db_path = repo_root / ".devflow" / "execution.sqlite"
    if not authoritative_project_id:
        return

    now = int(datetime.now(UTC).timestamp())
    with sqlite3.connect(db_path) as conn:
        conn.row_factory = sqlite3.Row
        # Oldest-first ordering so "keep rows[0]" below means "keep the oldest row".
        rows = conn.execute(
            "SELECT rowid, project_id FROM projects WHERE repo_root=? ORDER BY created_at ASC, rowid ASC",
            (str(repo_root),),
        ).fetchall()
        authoritative_rows = [row for row in rows if str(row["project_id"] or "") == authoritative_project_id]

        if authoritative_rows:
            keep_rowid = int(authoritative_rows[0]["rowid"])
            conn.execute(
                "DELETE FROM projects WHERE repo_root=? AND rowid<>?",
                (str(repo_root), keep_rowid),
            )
            # COALESCE preserves existing metadata_json; only NULL becomes "{}".
            conn.execute(
                "UPDATE projects SET project_id=?, name=?, repo_root=?, metadata_json=COALESCE(metadata_json, ?)"
                " WHERE rowid=?",
                (authoritative_project_id, authoritative_project_id, str(repo_root), json.dumps({}, sort_keys=True), keep_rowid),
            )
            conn.commit()
            return

        if rows:
            keep_rowid = int(rows[0]["rowid"])
            conn.execute(
                "UPDATE projects SET project_id=?, name=?, repo_root=? WHERE rowid=?",
                (authoritative_project_id, authoritative_project_id, str(repo_root), keep_rowid),
            )
            conn.execute(
                "DELETE FROM projects WHERE repo_root=? AND rowid<>?",
                (str(repo_root), keep_rowid),
            )
            conn.commit()
            return

        conn.execute(
            "INSERT INTO projects(project_id, created_at, name, repo_root, metadata_json) VALUES(?,?,?,?,?)",
            (authoritative_project_id, now, authoritative_project_id, str(repo_root), json.dumps({}, sort_keys=True)),
        )
        conn.commit()
711
+
712
+
713
def _project_existing_local_artifacts_to_supabase(*, repo_root: Path, authoritative_project_id: str, run_id: str) -> None:
    """Best-effort mirror of pre-existing local artifacts into Supabase.

    Pushes four artifact families found under <repo_root>/.devflow — source
    scopes, scope ideas, traditional/compiled stories, and integration runs —
    to their respective Supabase tables, upserting on each table's natural key
    (on_conflict + Prefer: resolution=merge-duplicates). Each family is wrapped
    in its own try/except so a failure (missing module, unreadable file,
    network error) never blocks project registration.
    """
    if not authoritative_project_id:
        return

    now = datetime.now(UTC).isoformat()

    # --- Source scopes → devflow_project_scopes (upsert on scope_id) ---
    try:
        from ..source_scope import dag as source_scope_dag

        source_scope_cfg = source_scope_dag._resolve_supabase_rest_config()
        if source_scope_cfg is not None:
            source_scope_url, source_scope_key = source_scope_cfg
            scope_rows: list[dict[str, Any]] = []
            for scope_path in sorted((repo_root / ".devflow" / "scopes").glob("*/scope.json")):
                try:
                    payload = json.loads(scope_path.read_text(encoding="utf-8"))
                except Exception:
                    # Unreadable/corrupt scope file — skip it, mirror the rest.
                    continue
                scope_rows.append(
                    {
                        "scope_id": str(payload.get("scope_id") or scope_path.parent.name),
                        "project_id": authoritative_project_id,
                        "scope_set_id": payload.get("scope_set_id"),
                        "run_id": run_id,
                        "title": str(payload.get("title") or payload.get("scope_id") or scope_path.parent.name),
                        "summary": payload.get("summary") or payload.get("description"),
                        "status": payload.get("status"),
                        "coverage_status": payload.get("coverage_status"),
                        "review_status": payload.get("review_status"),
                        "scope_set_title": payload.get("scope_set_title"),
                        "source_traceability_refs": payload.get("source_traceability_refs") or [],
                        "assumptions": payload.get("assumptions") or [],
                        "depends_on": payload.get("depends_on") or [],
                        "origin": "project_import",
                        "artifact_path": str(scope_path),
                        "updated_at": now,
                    }
                )
            if scope_rows:
                source_scope_dag._postgrest_request(
                    method="POST",
                    url=f"{source_scope_url}/rest/v1/devflow_project_scopes?on_conflict=scope_id",
                    key=source_scope_key,
                    body=scope_rows,
                    prefer="resolution=merge-duplicates",
                )
    except Exception:
        pass

    # --- Scope ideas → devflow_project_ideas (upsert on idea_id) ---
    try:
        from ..scope_idea import dag as scope_idea_dag

        scope_idea_cfg = scope_idea_dag._resolve_supabase_rest_config()
        if scope_idea_cfg is not None:
            scope_idea_url, scope_idea_key = scope_idea_cfg
            idea_rows: list[dict[str, Any]] = []
            for idea_path in sorted((repo_root / ".devflow" / "ideas").glob("*/idea.json")):
                try:
                    payload = json.loads(idea_path.read_text(encoding="utf-8"))
                except Exception:
                    continue
                idea_rows.append(
                    {
                        "idea_id": str(payload.get("idea_id") or idea_path.parent.name),
                        "project_id": authoritative_project_id,
                        "scope_set_id": payload.get("scope_set_id"),
                        "scope_id": payload.get("scope_id"),
                        "run_id": run_id,
                        "title": str(payload.get("title") or payload.get("idea_id") or idea_path.parent.name),
                        "summary": payload.get("summary") or payload.get("description"),
                        "status": payload.get("status"),
                        "shape": payload.get("shape"),
                        "resolution_status": payload.get("resolution_status"),
                        "origin": "project_import",
                        "artifact_path": str(idea_path),
                        "updated_at": now,
                    }
                )
            if idea_rows:
                scope_idea_dag._postgrest_request(
                    method="POST",
                    url=f"{scope_idea_url}/rest/v1/devflow_project_ideas?on_conflict=idea_id",
                    key=scope_idea_key,
                    body=idea_rows,
                    prefer="resolution=merge-duplicates",
                )
    except Exception:
        pass

    # --- Traditional + compiled stories → devflow_idea_stories (upsert on story_id) ---
    try:
        from ..idea import story_pipeline

        story_cfg = story_pipeline._resolve_supabase_rest_config()
        if story_cfg is not None:
            story_url, story_key = story_cfg
            story_rows: list[dict[str, Any]] = []
            ideas_root = repo_root / ".devflow" / "ideas"
            for idea_dir in sorted([path for path in ideas_root.iterdir() if path.is_dir()] if ideas_root.exists() else []):
                idea_id = idea_dir.name
                for manifest_path in sorted((idea_dir / "devflow_story_sets").glob("*/manifest.json")):
                    try:
                        compiled_manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
                    except Exception:
                        continue
                    source_story_set_id = str(compiled_manifest.get("source_story_set_id") or "").strip()
                    if not source_story_set_id:
                        continue
                    # A compiled set is only mirrored when its traditional source set exists.
                    trad_manifest_path = idea_dir / "traditional_user_stories" / source_story_set_id / "manifest.json"
                    if not trad_manifest_path.exists():
                        continue
                    try:
                        trad_manifest = json.loads(trad_manifest_path.read_text(encoding="utf-8"))
                    except Exception:
                        continue
                    trad_story_paths = [repo_root / rel for rel in (trad_manifest.get("story_paths") or [])]
                    compiled_story_paths = [repo_root / rel for rel in (compiled_manifest.get("story_paths") or [])]
                    # Pair traditional stories with compiled stories by their
                    # 1-based position in sorted path order.
                    compiled_by_index = {idx: path for idx, path in enumerate(sorted(compiled_story_paths), start=1)}
                    for index, trad_path in enumerate(sorted(trad_story_paths), start=1):
                        if not trad_path.exists():
                            continue
                        title, _actor, acceptance, statement = story_pipeline._story_statement_from_traditional(
                            trad_path.read_text(encoding="utf-8")
                        )
                        compiled_path = compiled_by_index.get(index)
                        compiled_payload: dict[str, Any] = {}
                        if compiled_path and compiled_path.exists():
                            try:
                                compiled_payload = json.loads(compiled_path.read_text(encoding="utf-8"))
                            except Exception:
                                compiled_payload = {}
                        story_rows.append(
                            {
                                "idea_id": idea_id,
                                "story_id": str(compiled_payload.get("story_id") or f"TRAD:{idea_id}:{index:03d}"),
                                "story_uuid": str(compiled_payload.get("story_uuid") or "") or None,
                                "project_id": authoritative_project_id,
                                "run_id": run_id,
                                "title": title,
                                "summary": statement or None,
                                "acceptance_criteria": acceptance,
                                "status": "ready_for_implementation",
                                "plane": str(compiled_payload.get("plane") or "") or None,
                                "required_planes": compiled_payload.get("required_planes") or [],
                                "devflow_story_set_id": manifest_path.parent.name,
                                "source_story_set_id": source_story_set_id,
                                "artifact_path": str(trad_path),
                                "compiled_story_id": str(compiled_payload.get("story_id") or "") or None,
                                "compiled_story_path": str(compiled_path) if compiled_path else None,
                                "updated_at": now,
                            }
                        )
            if story_rows:
                story_pipeline._postgrest_request(
                    method="POST",
                    url=f"{story_url}/rest/v1/devflow_idea_stories?on_conflict=story_id",
                    key=story_key,
                    body=story_rows,
                    prefer="resolution=merge-duplicates",
                )
    except Exception:
        pass

    # --- Integration runs → devflow_idea_integrations (upsert on idea_id) ---
    try:
        from ..integration import dag as integration_dag

        integration_cfg = integration_dag._resolve_supabase_rest_config()
        if integration_cfg is not None:
            integration_url, integration_key = integration_cfg
            integration_rows: list[dict[str, Any]] = []
            ideas_root = repo_root / ".devflow" / "ideas"
            for idea_dir in sorted([path for path in ideas_root.iterdir() if path.is_dir()] if ideas_root.exists() else []):
                idea_id = idea_dir.name
                for run_dir in sorted((idea_dir / "integration" / "runs").glob("*")) if (idea_dir / "integration" / "runs").exists() else []:
                    if not run_dir.is_dir():
                        continue

                    # Closure over run_dir; called immediately below within the
                    # same iteration, so late binding is not an issue here.
                    def _load_json(name: str) -> dict[str, Any] | None:
                        path = run_dir / name
                        if not path.exists():
                            return None
                        try:
                            return json.loads(path.read_text(encoding="utf-8"))
                        except Exception:
                            return None

                    side_effects = _load_json("side_effects.json")
                    implicated_users = _load_json("implicated_users.json")
                    workflow_inventory = _load_json("workflow_inventory.json")
                    validation_report = _load_json("validation_gate.json")
                    red_package = _load_json("red_package.json")
                    red_review = _load_json("red_review.json")
                    green_package = _load_json("green_package.json")
                    green_enrich = _load_json("green_enrich.json")
                    commit_package = _load_json("commit_package.json")
                    integration_rows.append(
                        {
                            "idea_id": idea_id,
                            "project_id": authoritative_project_id,
                            "run_id": run_dir.name,
                            "pipeline_dir": str(run_dir),
                            "status": "completed",
                            "exit_code": None,
                            "iterations_used": None,
                            "workflow_count": len((workflow_inventory or {}).get("workflow_ids") or []),
                            "side_effect_count": len((side_effects or {}).get("side_effects") or []),
                            "side_effects": side_effects,
                            "implicated_users": implicated_users,
                            "workflow_inventory": workflow_inventory,
                            "validation_report": validation_report,
                            "red_package": red_package,
                            "red_review": red_review,
                            "green_package": green_package,
                            "green_enrich": green_enrich,
                            "commit_package": commit_package,
                            "failure_message": None,
                            "repair_cycles": (validation_report or {}).get("repair_cycles", 0) if validation_report else 0,
                            "repair_patches_count": (validation_report or {}).get("repair_patches_count", 0) if validation_report else 0,
                            "repair_summary": (validation_report or {}).get("repair_summary") if validation_report else None,
                            "updated_at": now,
                        }
                    )
            if integration_rows:
                integration_dag._postgrest_request(
                    method="POST",
                    url=f"{integration_url}/rest/v1/devflow_idea_integrations?on_conflict=idea_id",
                    key=integration_key,
                    body=integration_rows,
                    prefer="resolution=merge-duplicates",
                )
    except Exception:
        pass
944
+
945
+
946
class DeriveProjectShellNode(Node):
    """First DAG node: derive the project "shell" record and persist it.

    Merges the incoming event's identity fields with any existing local
    registry entry, writes project_shell.json under the pipeline directory,
    records the attempt/artifact in the execution store, and stashes
    registry state in task_context.metadata for downstream nodes.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        """Build and persist the project shell artifact; returns the updated context."""
        event = task_context.event
        repo_root = Path(event.repo_root).expanduser().resolve()
        workspace_path = Path(event.workspace_path).expanduser().resolve()
        pipeline_dir = _pipeline_root(repo_root, event.project_id)
        pipeline_dir.mkdir(parents=True, exist_ok=True)

        # Record this node attempt in the execution store for observability.
        store, run_id = _store_run()
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="derive_project_shell",
            node_name="DeriveProjectShell",
            attempt=1,
            input={
                "repo_root": str(repo_root),
                "workspace_path": str(workspace_path),
                "remote_url": event.remote_url,
                "project_name": event.project_name,
            },
        )

        # Reuse any existing registry entry (and its state history) for this repo.
        registry = _read_projects_registry()
        projects = list(registry.get("projects", []))  # type: ignore[arg-type]
        found_index, found_entry = _find_existing_project_entry(
            projects=projects,
            repo_root=repo_root,
            workspace_path=workspace_path,
            remote_url=event.remote_url,
        )
        existing_history = found_entry.get("state_history", []) if isinstance(found_entry, dict) else []
        # Non-dict history items are silently dropped.
        state_history = [StateHistoryRecord.model_validate(item) for item in existing_history if isinstance(item, dict)]
        current_state = str(found_entry.get("registration_state") or "draft_unbound") if isinstance(found_entry, dict) else "draft_unbound"
        owner, repo = _infer_owner_repo(event.remote_url, repo_root, event.project_name)

        # Event-derived fields overwrite whatever the existing entry held.
        entry: dict[str, object] = dict(found_entry or {})
        entry.update(
            {
                "project_id": str(event.project_id),
                "owner": owner,
                "repo": repo,
                "workspace_path": str(workspace_path),
                "repo_root": str(repo_root),
                "remote_url": event.remote_url,
                "registration_command": event.command_name,
                "registration_artifact": str(pipeline_dir / "project_registration.json"),
            }
        )

        artifact = ProjectShellArtifact(
            dag_id=DAG_ID,
            project_id=str(entry["project_id"]),
            owner=owner,
            repo=repo,
            repo_root=str(repo_root),
            workspace_path=str(workspace_path),
            remote_url=event.remote_url,
            registration_command=event.command_name,
            registration_state=current_state,
            state_history=state_history,
            created_at=event.occurred_at,
            updated_at=event.occurred_at,
        )
        shell_path = pipeline_dir / "project_shell.json"
        _write_json(shell_path, artifact.model_dump())
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="project_registration.project_shell",
            uri=str(shell_path),
            metadata=artifact.model_dump(),
        )

        # Thread registry/entry state through metadata for the downstream nodes.
        task_context.metadata["repo_root"] = str(repo_root)
        task_context.metadata["workspace_path"] = str(workspace_path)
        task_context.metadata["pipeline_dir"] = str(pipeline_dir)
        task_context.metadata["registry"] = registry
        task_context.metadata["projects"] = projects
        task_context.metadata["registry_index"] = found_index
        task_context.metadata["registry_entry"] = entry
        task_context.metadata["state_history"] = _jsonable_history(state_history)
        task_context.metadata["artifacts"] = {
            "project_shell_ref": str(shell_path),
        }

        store.mark_node_finished(
            node_exec_id=node_exec_id,
            status="succeeded",
            output={
                "project_id": str(entry["project_id"]),
                "project_shell_ref": str(shell_path),
                "existing_state_count": len(state_history),
            },
        )
        self.save_output(artifact)
        return task_context
1042
+
1043
+
1044
class PersistBindingRequestedNode(Node):
    """DAG node that advances the registry entry to ``binding_requested``.

    Records the ``draft_unbound`` -> ``binding_requested`` state transitions,
    persists the state history and a binding-request artifact under the
    pipeline directory, and mirrors both into the execution store.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        # Execution store + run id are threaded in via module globals
        # (set up by run_project_registration_dag before wf.run()).
        store, run_id = _store_run()
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="persist_binding_requested",
            node_name="PersistBindingRequested",
            attempt=1,
        )

        # Work on copies of the registry structures carried in metadata so the
        # updated versions are written back explicitly at the end.
        pipeline_dir = Path(str(task_context.metadata["pipeline_dir"]))
        registry = dict(task_context.metadata["registry"])
        projects = list(task_context.metadata["projects"])
        found_index = task_context.metadata.get("registry_index")
        entry = dict(task_context.metadata["registry_entry"])
        # State history round-trips through metadata as plain dicts; re-hydrate
        # into model objects for the upsert helpers.
        state_history = [StateHistoryRecord.model_validate(item) for item in task_context.metadata.get("state_history", [])]

        # Two sequential transitions: first ensure the entry exists in the
        # "draft_unbound" state, then move it to "binding_requested".
        found_index, entry = _upsert_registry_state(
            entry=entry,
            # Guard: metadata may carry a non-int placeholder for the index.
            found_index=found_index if isinstance(found_index, int) else None,
            registry=registry,
            projects=projects,
            state_history=state_history,
            state="draft_unbound",
            timestamp=event.occurred_at,
        )
        found_index, entry = _upsert_registry_state(
            entry=entry,
            found_index=found_index,
            registry=registry,
            projects=projects,
            state_history=state_history,
            state="binding_requested",
            timestamp=event.occurred_at,
        )

        # Persist the accumulated state history as JSON next to the other
        # pipeline artifacts.
        state_history_path = pipeline_dir / "state_history.json"
        state_history_payload = {
            "project_id": str(entry["project_id"]),
            "state_history": _jsonable_history(state_history),
        }
        _write_json(state_history_path, state_history_payload)

        binding_artifact = BindingRequestArtifact(
            dag_id=DAG_ID,
            project_id=str(entry["project_id"]),
            registration_state="binding_requested",
            state_history=state_history,
            project_shell_ref=str(task_context.metadata["artifacts"]["project_shell_ref"]),
            state_history_ref=str(state_history_path),
            registry_entry_ref=str(pipeline_dir / "project_registration.json"),
            created_at=event.occurred_at,
            updated_at=event.occurred_at,
        )
        binding_path = pipeline_dir / "binding_request.json"
        _write_json(binding_path, binding_artifact.model_dump())
        # Register both written files in the execution store for traceability.
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="project_registration.binding_request",
            uri=str(binding_path),
            metadata=binding_artifact.model_dump(),
        )
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="project_registration.state_history",
            uri=str(state_history_path),
            metadata=state_history_payload,
        )

        # Write the mutated copies back into metadata for downstream nodes.
        task_context.metadata["registry"] = registry
        task_context.metadata["projects"] = projects
        task_context.metadata["registry_index"] = found_index
        task_context.metadata["registry_entry"] = entry
        task_context.metadata["state_history"] = _jsonable_history(state_history)
        task_context.metadata["artifacts"]["state_history_ref"] = str(state_history_path)
        task_context.metadata["artifacts"]["binding_request_ref"] = str(binding_path)

        store.mark_node_finished(
            node_exec_id=node_exec_id,
            status="succeeded",
            output={
                "registration_state": "binding_requested",
                "binding_request_ref": str(binding_path),
                "state_history_ref": str(state_history_path),
            },
        )
        self.save_output(binding_artifact)
        return task_context
1135
+
1136
+
1137
class RegisterEngineNode(Node):
    """DAG node that runs the injected engine-registration callback.

    On success, moves the registry entry to ``engine_registered`` and writes a
    success artifact. On any exception from the callback, writes a failure
    artifact plus a summary with exit code 2 and stops the workflow.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        repo_root = Path(str(task_context.metadata["repo_root"]))
        pipeline_dir = Path(str(task_context.metadata["pipeline_dir"]))
        # Execution store + run id come from module globals (see _store_run).
        store, run_id = _store_run()
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="register_engine",
            node_name="RegisterEngine",
            attempt=1,
            input={
                "repo_root": str(repo_root),
                "command_name": event.command_name,
            },
        )

        registry = dict(task_context.metadata["registry"])
        projects = list(task_context.metadata["projects"])
        found_index = task_context.metadata.get("registry_index")
        entry = dict(task_context.metadata["registry_entry"])
        state_history = [StateHistoryRecord.model_validate(item) for item in task_context.metadata.get("state_history", [])]

        try:
            # The registration callback is injected by the caller of
            # run_project_registration_dag and threaded through a module
            # global; it receives the resolved repo root.
            _registration_callback()(repo_root)
        except Exception as exc:
            # Failure path: record an EngineRegistrationArtifact with ok=False,
            # write a summary with exit code 2, and halt the workflow so the
            # finalize node never runs.
            failure_artifact = EngineRegistrationArtifact(
                dag_id=DAG_ID,
                project_id=str(entry["project_id"]),
                # Keep whatever state the entry was in; default to the
                # pre-engine state if none is recorded.
                registration_state=str(entry.get("registration_state") or "binding_requested"),
                command_name=event.command_name,
                repo_root=str(repo_root),
                workspace_path=str(task_context.metadata["workspace_path"]),
                remote_url=event.remote_url,
                state_history=state_history,
                ok=False,
                error_message=str(exc),
                created_at=event.occurred_at,
                updated_at=event.occurred_at,
            )
            failure_path = pipeline_dir / "engine_registration.json"
            _write_json(failure_path, failure_artifact.model_dump())
            store.add_artifact(
                run_id=run_id,
                node_exec_id=node_exec_id,
                kind="project_registration.engine_registration",
                uri=str(failure_path),
                metadata=failure_artifact.model_dump(),
            )
            outcome = {
                "project_id": str(entry["project_id"]),
                "registration_state": str(entry.get("registration_state") or "binding_requested"),
                "pipeline_dir": str(pipeline_dir),
                "repo_root": str(repo_root),
                "error": str(exc),
            }
            summary_path = _write_summary(
                pipeline_dir=pipeline_dir,
                exit_code=2,
                run_id=run_id,
                outcome=outcome,
                message="project registration failed during engine bootstrap",
            )
            task_context.metadata["artifacts"]["engine_registration_ref"] = str(failure_path)
            task_context.metadata["artifacts"]["summary_ref"] = str(summary_path)
            task_context.metadata["outcome"] = outcome
            # CLI-facing message is the JSON outcome, newline-terminated.
            task_context.metadata["message"] = json.dumps(outcome, sort_keys=True) + "\n"
            task_context.metadata["exit_code"] = 2
            store.mark_node_finished(
                node_exec_id=node_exec_id,
                status="failed",
                output={
                    "engine_registration_ref": str(failure_path),
                    "summary_ref": str(summary_path),
                },
                error={"message": str(exc)},
            )
            task_context.stop_workflow()
            return task_context

        # Success path: advance the registry entry to "engine_registered".
        found_index, entry = _upsert_registry_state(
            entry=entry,
            found_index=found_index if isinstance(found_index, int) else None,
            registry=registry,
            projects=projects,
            state_history=state_history,
            state="engine_registered",
            timestamp=event.occurred_at,
        )
        artifact = EngineRegistrationArtifact(
            dag_id=DAG_ID,
            project_id=str(entry["project_id"]),
            registration_state="engine_registered",
            command_name=event.command_name,
            repo_root=str(repo_root),
            workspace_path=str(task_context.metadata["workspace_path"]),
            remote_url=event.remote_url,
            state_history=state_history,
            ok=True,
            error_message=None,
            created_at=event.occurred_at,
            updated_at=event.occurred_at,
        )
        engine_path = pipeline_dir / "engine_registration.json"
        _write_json(engine_path, artifact.model_dump())
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="project_registration.engine_registration",
            uri=str(engine_path),
            metadata=artifact.model_dump(),
        )

        # Publish the updated registry structures for downstream nodes.
        task_context.metadata["registry"] = registry
        task_context.metadata["projects"] = projects
        task_context.metadata["registry_index"] = found_index
        task_context.metadata["registry_entry"] = entry
        task_context.metadata["state_history"] = _jsonable_history(state_history)
        task_context.metadata["artifacts"]["engine_registration_ref"] = str(engine_path)

        store.mark_node_finished(
            node_exec_id=node_exec_id,
            status="succeeded",
            output={
                "registration_state": "engine_registered",
                "engine_registration_ref": str(engine_path),
            },
        )
        self.save_output(artifact)
        return task_context
1267
+
1268
+
1269
class FinalizeProjectRegistrationNode(Node):
    """Terminal DAG node: marks the project ``ready_for_source_scope``.

    Writes the final registration record and success summary, publishes the
    devflow_state record, and best-effort mirrors pre-existing local artifacts
    to Supabase.
    """

    async def process(self, task_context: TaskContext) -> TaskContext:
        event = task_context.event
        pipeline_dir = Path(str(task_context.metadata["pipeline_dir"]))
        repo_root = Path(str(task_context.metadata["repo_root"]))
        store, run_id = _store_run()
        node_exec_id = store.create_node_attempt(
            run_id=run_id,
            node_id="finalize_registration",
            node_name="FinalizeProjectRegistration",
            attempt=1,
        )

        registry = dict(task_context.metadata["registry"])
        projects = list(task_context.metadata["projects"])
        found_index = task_context.metadata.get("registry_index")
        entry = dict(task_context.metadata["registry_entry"])
        state_history = [StateHistoryRecord.model_validate(item) for item in task_context.metadata.get("state_history", [])]

        # Final state transition for the registration DAG.
        found_index, entry = _upsert_registry_state(
            entry=entry,
            found_index=found_index if isinstance(found_index, int) else None,
            registry=registry,
            projects=projects,
            state_history=state_history,
            state="ready_for_source_scope",
            timestamp=event.occurred_at,
        )

        # Make sure the source-doc scaffold directories exist before handoff.
        scaffold_paths = ensure_source_doc_scaffold(repo_root)

        registration_artifact = ProjectRegistrationArtifact(
            dag_id=DAG_ID,
            run_id=run_id,
            project_id=str(entry["project_id"]),
            command_name=event.command_name,
            repo_root=str(repo_root),
            workspace_path=str(task_context.metadata["workspace_path"]),
            remote_url=event.remote_url,
            registration_state="ready_for_source_scope",
            state_history=state_history,
            # References to artifacts produced by the earlier nodes.
            project_shell_ref=str(task_context.metadata["artifacts"]["project_shell_ref"]),
            state_history_ref=str(task_context.metadata["artifacts"]["state_history_ref"]),
            engine_registration_ref=str(task_context.metadata["artifacts"]["engine_registration_ref"]),
            summary_ref=str(pipeline_dir / "summary.json"),
            created_at=event.occurred_at,
            updated_at=event.occurred_at,
        )
        registration_path = pipeline_dir / "project_registration.json"
        _write_json(registration_path, registration_artifact.model_dump())
        store.add_artifact(
            run_id=run_id,
            node_exec_id=node_exec_id,
            kind="project_registration.registration_record",
            uri=str(registration_path),
            metadata=registration_artifact.model_dump(),
        )

        outcome = {
            "project_id": str(entry["project_id"]),
            "registration_state": "ready_for_source_scope",
            "pipeline_dir": str(pipeline_dir),
            "workspace": str(repo_root),
            "next_step": "source docs -> scopes",
        }
        summary_path = _write_summary(
            pipeline_dir=pipeline_dir,
            exit_code=0,
            run_id=run_id,
            outcome=outcome,
            message="project registration complete",
        )

        task_context.metadata["registry"] = registry
        task_context.metadata["projects"] = projects
        task_context.metadata["registry_index"] = found_index
        task_context.metadata["registry_entry"] = entry
        task_context.metadata["state_history"] = _jsonable_history(state_history)
        task_context.metadata["artifacts"]["registration_artifact_ref"] = str(registration_path)
        task_context.metadata["artifacts"]["summary_ref"] = str(summary_path)
        task_context.metadata["outcome"] = outcome
        task_context.metadata["message"] = json.dumps(outcome, sort_keys=True) + "\n"
        task_context.metadata["exit_code"] = 0

        # Create / update the devflow_state record for this project.
        # This is a required part of successful registration when Supabase is
        # configured, so failures should stop the registration flow.
        _publish_devflow_state_for_registration(
            project_id=str(entry["project_id"]),
            run_id=run_id,
        )

        # Best-effort mirror of previously produced local artifacts; a failure
        # here is deliberately swallowed and does not fail registration.
        try:
            _project_existing_local_artifacts_to_supabase(
                repo_root=repo_root,
                authoritative_project_id=str(entry["project_id"]),
                run_id=run_id,
            )
        except Exception:
            pass

        store.mark_node_finished(
            node_exec_id=node_exec_id,
            status="succeeded",
            output={
                "registration_state": "ready_for_source_scope",
                "registration_artifact_ref": str(registration_path),
                "summary_ref": str(summary_path),
                "source_docs_dir": str(scaffold_paths.source_docs_dir),
                "project_docs_dir": str(scaffold_paths.project_docs_dir),
            },
        )
        self.save_output(registration_artifact)
        return task_context
1383
+
1384
+
1385
class ProjectRegistrationWorkflow(Workflow):
    """Declarative wiring for the four-node project registration DAG."""

    # Linear pipeline: each node hands off to exactly one successor, and the
    # finalize node terminates the chain.
    workflow_schema = WorkflowSchema(
        description="Project registration DAG (derive shell -> bind state -> engine registration -> ready handoff)",
        event_schema=ProjectRegistrationDagEvent,
        start=DeriveProjectShellNode,
        nodes=[
            NodeConfig(node=node_cls, connections=next_nodes)
            for node_cls, next_nodes in (
                (DeriveProjectShellNode, [PersistBindingRequestedNode]),
                (PersistBindingRequestedNode, [RegisterEngineNode]),
                (RegisterEngineNode, [FinalizeProjectRegistrationNode]),
                (FinalizeProjectRegistrationNode, []),
            )
        ],
    )
1397
+
1398
+
1399
def run_project_registration_dag(
    *,
    repo_root: Path,
    workspace_path: Path,
    remote_url: str | None,
    project_name: str | None,
    command_name: str,
    register_repo: RegistrationCallback,
    supabase_project_id: str | None = None,
) -> ProjectRegistrationDagResult:
    """Run the full project-registration DAG for one repository.

    Resolves a canonical project id (Supabase UUID preferred, hash fallback),
    creates an execution-store run, executes ProjectRegistrationWorkflow, and
    on success reconciles local/Supabase project rows before returning a
    ProjectRegistrationDagResult.

    Args:
        repo_root: Repository root; expanded/resolved, ``.devflow`` is created.
        workspace_path: Workspace directory; expanded/resolved.
        remote_url: Optional git remote URL used in identity derivation.
        project_name: Optional human-readable project name.
        command_name: CLI command name recorded in run config and artifacts.
        register_repo: Callback invoked by RegisterEngineNode to bootstrap the
            engine for this repo.
        supabase_project_id: Optional explicit UUID override for project_id.

    Raises:
        Exception: re-raises whatever the workflow raised (after writing a
            failure summary and marking the run failed).
        RuntimeError: when the post-run finalize sync fails on a run that had
            otherwise succeeded.
    """
    repo_root = repo_root.expanduser().resolve()
    workspace_path = workspace_path.expanduser().resolve()
    repo_root.joinpath(".devflow").mkdir(parents=True, exist_ok=True)

    # Derive the stable (owner, repo, hash-id) identity triple up front; the
    # hash id is only used when no Supabase UUID can be resolved.
    _owner, _repo, _hash_project_id = _stable_project_identity(
        repo_root=repo_root,
        remote_url=remote_url,
        project_name=project_name,
    )

    # Prefer the Supabase UUID as the canonical local project_id.
    # Resolution order (first match wins):
    #   1. Explicit supabase_project_id kwarg — highest priority override.
    #   2. Existing UUID already stored in the local registry for this repo —
    #      avoids re-querying Supabase and, critically, avoids creating a SECOND
    #      devflow_projects row (and thus a drifted project identity) on re-import.
    #   3. Supabase lookup-or-create — used only when the project has never been
    #      registered locally before.
    #   4. Hash-based fallback — used only when Supabase is not configured at all.
    _supabase_uuid_resolved: str | None = None

    if supabase_project_id and _is_uuid_like(supabase_project_id):
        # Explicit override takes highest priority
        project_id = supabase_project_id
        _supabase_uuid_resolved = supabase_project_id
    else:
        # Check local registry before hitting Supabase — prevents duplicate row creation
        _local_uuid = _read_existing_local_project_uuid(
            repo_root=repo_root,
            workspace_path=workspace_path,
            remote_url=remote_url,
        )
        if _local_uuid:
            # Already registered — reuse without a Supabase round-trip
            project_id = _local_uuid
            _supabase_uuid_resolved = _local_uuid
        else:
            # First time this repo is being registered — look up or create in Supabase
            _supabase_uuid_resolved = _resolve_or_create_supabase_project_uuid(
                repo_root=repo_root,
                remote_url=remote_url,
                project_name=project_name,
                owner=_owner,
                repo=_repo,
            )
            project_id = _supabase_uuid_resolved if _supabase_uuid_resolved else _hash_project_id
    pipeline_key = "current"
    pipeline_dir = _pipeline_root(repo_root, project_id)
    pipeline_dir.mkdir(parents=True, exist_ok=True)
    # Timezone-aware UTC timestamp, truncated to whole seconds.
    occurred_at = datetime.now(UTC).replace(microsecond=0).isoformat()

    store = ExecutionStore(repo_root / ".devflow" / "execution.sqlite")
    run_id = store.create_run(
        dag_id=DAG_ID,
        dag_version="v2_workflow",
        root_correlation_id=f"corr_{project_id}",
        config={
            "project_id": project_id,
            "pipeline_key": pipeline_key,
            "command_name": command_name,
            "workspace_path": str(workspace_path),
            "remote_url": remote_url,
        },
    )
    store.mark_run_started(run_id=run_id)

    wf = ProjectRegistrationWorkflow()
    # Thread the store, run id, and registration callback to the workflow
    # nodes via module globals; cleared unconditionally in the finally below.
    global _CURRENT_STORE, _CURRENT_RUN_ID, _CURRENT_REGISTER_REPO
    _CURRENT_STORE = store
    _CURRENT_RUN_ID = run_id
    _CURRENT_REGISTER_REPO = register_repo
    ctx: TaskContext | None = None
    try:
        ctx = wf.run(
            {
                "repo_root": str(repo_root),
                "workspace_path": str(workspace_path),
                "remote_url": remote_url,
                "project_name": project_name,
                "command_name": command_name,
                "project_id": project_id,
                "pipeline_key": pipeline_key,
                "occurred_at": occurred_at,
            }
        )
    except Exception as exc:
        # Workflow blew up before a node could write its own failure summary:
        # record one here, mark the run failed, and re-raise to the caller.
        outcome = {
            "project_id": project_id,
            "registration_state": "binding_requested",
            "pipeline_dir": str(pipeline_dir),
            "repo_root": str(repo_root),
            "error": str(exc),
        }
        _write_summary(
            pipeline_dir=pipeline_dir,
            exit_code=2,
            run_id=run_id,
            outcome=outcome,
            message="project registration failed",
        )
        store.mark_run_finished(run_id=run_id, status="failed")
        raise
    finally:
        # Always clear the globals so a later run cannot pick up stale state.
        _CURRENT_STORE = None
        _CURRENT_RUN_ID = None
        _CURRENT_REGISTER_REPO = None

    assert ctx is not None
    exit_code = int(ctx.metadata.get("exit_code") or 0)
    if exit_code == 0:
        # Post-success reconciliation: align the local execution store and the
        # Supabase project rows with the authoritative project id. Unlike the
        # best-effort mirroring inside the finalize node, a failure here turns
        # the run into a failure (exit code 2 summary + RuntimeError).
        try:
            _reconcile_execution_store_project_row(
                repo_root=repo_root,
                authoritative_project_id=project_id,
            )
            if _supabase_uuid_resolved:
                _supabase_cfg = _resolve_supabase_config_for_registration()
                if _supabase_cfg:
                    # _supabase_cfg is (url, key).
                    _repair_supabase_project_row(
                        url=_supabase_cfg[0],
                        key=_supabase_cfg[1],
                        supabase_uuid=_supabase_uuid_resolved,
                        repo_root=repo_root,
                        remote_url=remote_url,
                        project_name=project_name,
                        owner=_owner,
                        repo=_repo,
                        registration_status="ready_for_source_scope",
                    )
                    _update_supabase_devflow_project_id(
                        url=_supabase_cfg[0],
                        key=_supabase_cfg[1],
                        supabase_uuid=_supabase_uuid_resolved,
                    )
        except Exception as exc:
            outcome = {
                "project_id": project_id,
                "registration_state": "ready_for_source_scope",
                "pipeline_dir": str(pipeline_dir),
                "repo_root": str(repo_root),
                "error": str(exc),
            }
            _write_summary(
                pipeline_dir=pipeline_dir,
                exit_code=2,
                run_id=run_id,
                outcome=outcome,
                message="project registration failed during finalize sync",
            )
            store.mark_run_finished(run_id=run_id, status="failed")
            raise RuntimeError(
                f"Project registration finalize sync failed for {repo_root}: {exc}"
            ) from exc

    store.mark_run_finished(run_id=run_id, status="succeeded" if exit_code == 0 else "failed")

    # Assemble the result from whatever the nodes left in metadata; fall back
    # to sensible defaults when a failing run left fields unset.
    registry_entry = dict(ctx.metadata.get("registry_entry") or {})
    state_history = [item for item in ctx.metadata.get("state_history", []) if isinstance(item, dict)]
    registration_state = str(registry_entry.get("registration_state") or ("ready_for_source_scope" if exit_code == 0 else "binding_requested"))
    message = str(ctx.metadata.get("message") or "")

    return ProjectRegistrationDagResult(
        exit_code=exit_code,
        run_id=run_id,
        project_id=project_id,
        registration_state=registration_state,
        pipeline_dir=pipeline_dir,
        repo_root=repo_root,
        workspace_path=workspace_path,
        registry_entry=registry_entry,
        state_history=state_history,
        message=message,
    )