devflow-engine 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- devflow_engine/__init__.py +3 -0
- devflow_engine/agentic_prompts.py +100 -0
- devflow_engine/agentic_runtime.py +398 -0
- devflow_engine/api_key_flow_harness.py +539 -0
- devflow_engine/api_keys.py +357 -0
- devflow_engine/bootstrap/__init__.py +2 -0
- devflow_engine/bootstrap/provision_from_template.py +84 -0
- devflow_engine/cli/__init__.py +0 -0
- devflow_engine/cli/app.py +7270 -0
- devflow_engine/core/__init__.py +0 -0
- devflow_engine/core/config.py +86 -0
- devflow_engine/core/logging.py +29 -0
- devflow_engine/core/paths.py +45 -0
- devflow_engine/core/toml_kv.py +33 -0
- devflow_engine/devflow_event_worker.py +1292 -0
- devflow_engine/devflow_state.py +201 -0
- devflow_engine/devin2/__init__.py +9 -0
- devflow_engine/devin2/agent_definition.py +120 -0
- devflow_engine/devin2/pi_runner.py +204 -0
- devflow_engine/devin_orchestration.py +69 -0
- devflow_engine/docs/prompts/anti-patterns.md +42 -0
- devflow_engine/docs/prompts/devin-agent-prompt.md +55 -0
- devflow_engine/docs/prompts/devin2-agent-prompt.md +81 -0
- devflow_engine/docs/prompts/examples/devin-vapi-clone-reference-exchange.json +85 -0
- devflow_engine/doctor/__init__.py +2 -0
- devflow_engine/doctor/triage.py +140 -0
- devflow_engine/error/__init__.py +0 -0
- devflow_engine/error/remediation.py +21 -0
- devflow_engine/errors/error_solver_dag.py +522 -0
- devflow_engine/errors/runtime_observability.py +67 -0
- devflow_engine/idea/__init__.py +4 -0
- devflow_engine/idea/actors.py +481 -0
- devflow_engine/idea/agentic.py +465 -0
- devflow_engine/idea/analyze.py +93 -0
- devflow_engine/idea/devin_chat_dag.py +1 -0
- devflow_engine/idea/diff.py +99 -0
- devflow_engine/idea/drafts.py +446 -0
- devflow_engine/idea/idea_creation_dag.py +643 -0
- devflow_engine/idea/ideation_enrichment.py +355 -0
- devflow_engine/idea/ideation_enrichment_worker.py +19 -0
- devflow_engine/idea/paths.py +28 -0
- devflow_engine/idea/promote.py +53 -0
- devflow_engine/idea/redaction.py +27 -0
- devflow_engine/idea/repo_tools.py +1277 -0
- devflow_engine/idea/response_mode.py +30 -0
- devflow_engine/idea/story_pipeline.py +1585 -0
- devflow_engine/idea/sufficiency.py +376 -0
- devflow_engine/idea/traditional_stories.py +1257 -0
- devflow_engine/implementation/__init__.py +0 -0
- devflow_engine/implementation/alembic_preflight.py +700 -0
- devflow_engine/implementation/dag.py +8450 -0
- devflow_engine/implementation/green_gate.py +93 -0
- devflow_engine/implementation/prompts.py +108 -0
- devflow_engine/implementation/test_runtime.py +623 -0
- devflow_engine/integration/__init__.py +19 -0
- devflow_engine/integration/agentic.py +66 -0
- devflow_engine/integration/dag.py +3539 -0
- devflow_engine/integration/prompts.py +114 -0
- devflow_engine/integration/supabase_schema.sql +31 -0
- devflow_engine/integration/supabase_sync.py +177 -0
- devflow_engine/llm/__init__.py +1 -0
- devflow_engine/llm/cli_one_shot.py +84 -0
- devflow_engine/llm/cli_stream.py +371 -0
- devflow_engine/llm/execution_context.py +26 -0
- devflow_engine/llm/invoke.py +1322 -0
- devflow_engine/llm/provider_api.py +304 -0
- devflow_engine/llm/repo_knowledge.py +588 -0
- devflow_engine/llm_primitives.py +315 -0
- devflow_engine/orchestration.py +62 -0
- devflow_engine/planning/__init__.py +0 -0
- devflow_engine/planning/analyze_repo.py +92 -0
- devflow_engine/planning/render_drafts.py +133 -0
- devflow_engine/playground/__init__.py +0 -0
- devflow_engine/playground/hooks.py +26 -0
- devflow_engine/playwright_workflow/__init__.py +5 -0
- devflow_engine/playwright_workflow/dag.py +1317 -0
- devflow_engine/process/__init__.py +5 -0
- devflow_engine/process/dag.py +59 -0
- devflow_engine/project_registration/__init__.py +3 -0
- devflow_engine/project_registration/dag.py +1581 -0
- devflow_engine/project_registry.py +109 -0
- devflow_engine/prompts/devin/generic/prompt.md +6 -0
- devflow_engine/prompts/devin/ideation/prompt.md +263 -0
- devflow_engine/prompts/devin/ideation/scenarios.md +5 -0
- devflow_engine/prompts/devin/ideation_loop/prompt.md +6 -0
- devflow_engine/prompts/devin/insight/prompt.md +11 -0
- devflow_engine/prompts/devin/insight/scenarios.md +5 -0
- devflow_engine/prompts/devin/intake/prompt.md +15 -0
- devflow_engine/prompts/devin/iterate/prompt.md +12 -0
- devflow_engine/prompts/devin/shared/eval_doctrine.md +9 -0
- devflow_engine/prompts/devin/shared/principles.md +246 -0
- devflow_engine/prompts/devin_eval/assessment/prompt.md +18 -0
- devflow_engine/prompts/idea/api_ideation_agent/prompt.md +8 -0
- devflow_engine/prompts/idea/api_insight_agent/prompt.md +8 -0
- devflow_engine/prompts/idea/response_doctrine/prompt.md +18 -0
- devflow_engine/prompts/implementation/dependency_assessment/prompt.md +12 -0
- devflow_engine/prompts/implementation/green/green/prompt.md +11 -0
- devflow_engine/prompts/implementation/green/node_config/prompt.md +3 -0
- devflow_engine/prompts/implementation/green_review/outcome_review/prompt.md +5 -0
- devflow_engine/prompts/implementation/green_review/prior_run_review/prompt.md +5 -0
- devflow_engine/prompts/implementation/red/prompt.md +27 -0
- devflow_engine/prompts/implementation/redreview/prompt.md +23 -0
- devflow_engine/prompts/implementation/redreview_repair/prompt.md +16 -0
- devflow_engine/prompts/implementation/setupdoc/prompt.md +10 -0
- devflow_engine/prompts/implementation/story_planning/prompt.md +13 -0
- devflow_engine/prompts/implementation/test_design/prompt.md +27 -0
- devflow_engine/prompts/integration/README.md +185 -0
- devflow_engine/prompts/integration/green/example.md +67 -0
- devflow_engine/prompts/integration/green/green/prompt.md +10 -0
- devflow_engine/prompts/integration/green/node_config/prompt.md +42 -0
- devflow_engine/prompts/integration/green/past_prompts/20260417T212300/green/prompt.md +15 -0
- devflow_engine/prompts/integration/green/past_prompts/20260417T212300/node_config/prompt.md +42 -0
- devflow_engine/prompts/integration/green_enrich/example.md +79 -0
- devflow_engine/prompts/integration/green_enrich/green_enrich/prompt.md +9 -0
- devflow_engine/prompts/integration/green_enrich/node_config/prompt.md +41 -0
- devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/green_enrich/prompt.md +14 -0
- devflow_engine/prompts/integration/green_enrich/past_prompts/20260417T212300/node_config/prompt.md +41 -0
- devflow_engine/prompts/integration/red/code_repair/prompt.md +12 -0
- devflow_engine/prompts/integration/red/example.md +152 -0
- devflow_engine/prompts/integration/red/node_config/prompt.md +86 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T212300/node_config/prompt.md +84 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red/prompt.md +16 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T212300/red_repair/prompt.md +15 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T215032/node_config/prompt.md +84 -0
- devflow_engine/prompts/integration/red/past_prompts/20260417T215032/red_repair/prompt.md +11 -0
- devflow_engine/prompts/integration/red/red/prompt.md +11 -0
- devflow_engine/prompts/integration/red/red_repair/prompt.md +12 -0
- devflow_engine/prompts/integration/red_review/example.md +71 -0
- devflow_engine/prompts/integration/red_review/node_config/prompt.md +41 -0
- devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/node_config/prompt.md +41 -0
- devflow_engine/prompts/integration/red_review/past_prompts/20260417T212300/red_review/prompt.md +15 -0
- devflow_engine/prompts/integration/red_review/red_review/prompt.md +9 -0
- devflow_engine/prompts/integration/resolve/example.md +111 -0
- devflow_engine/prompts/integration/resolve/node_config/prompt.md +64 -0
- devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/node_config/prompt.md +64 -0
- devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_implicated_users/prompt.md +15 -0
- devflow_engine/prompts/integration/resolve/past_prompts/20260417T212300/resolve_side_effects/prompt.md +15 -0
- devflow_engine/prompts/integration/resolve/resolve_implicated_users/prompt.md +10 -0
- devflow_engine/prompts/integration/resolve/resolve_side_effects/prompt.md +10 -0
- devflow_engine/prompts/integration/validate/build_idea_acceptance_coverage/prompt.md +12 -0
- devflow_engine/prompts/integration/validate/code_repair/prompt.md +13 -0
- devflow_engine/prompts/integration/validate/example.md +143 -0
- devflow_engine/prompts/integration/validate/node_config/prompt.md +87 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/code_repair/prompt.md +19 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/node_config/prompt.md +67 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_enrich_gate/prompt.md +17 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T212300/validate_repair/prompt.md +16 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/code_repair/prompt.md +10 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/node_config/prompt.md +67 -0
- devflow_engine/prompts/integration/validate/past_prompts/20260417T215032/validate_repair/prompt.md +9 -0
- devflow_engine/prompts/integration/validate/validate_enrich_gate/prompt.md +10 -0
- devflow_engine/prompts/integration/validate/validate_repair/prompt.md +20 -0
- devflow_engine/prompts/integration/write_workflows/example.md +100 -0
- devflow_engine/prompts/integration/write_workflows/node_config/prompt.md +44 -0
- devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/node_config/prompt.md +44 -0
- devflow_engine/prompts/integration/write_workflows/past_prompts/20260417T212300/write_workflows/prompt.md +17 -0
- devflow_engine/prompts/integration/write_workflows/write_workflows/prompt.md +11 -0
- devflow_engine/prompts/iterate/README.md +7 -0
- devflow_engine/prompts/iterate/coder/prompt.md +11 -0
- devflow_engine/prompts/iterate/framer/prompt.md +11 -0
- devflow_engine/prompts/iterate/iterator/prompt.md +13 -0
- devflow_engine/prompts/iterate/observer/prompt.md +11 -0
- devflow_engine/prompts/recovery/diagnosis/prompt.md +7 -0
- devflow_engine/prompts/recovery/execution/prompt.md +8 -0
- devflow_engine/prompts/recovery/execution_verification/prompt.md +7 -0
- devflow_engine/prompts/recovery/failure_investigation/prompt.md +10 -0
- devflow_engine/prompts/recovery/preflight_health_repo_repair/prompt.md +8 -0
- devflow_engine/prompts/recovery/remediation_execution/prompt.md +11 -0
- devflow_engine/prompts/recovery/root_cause_investigation/prompt.md +12 -0
- devflow_engine/prompts/scope_idea/doctrine/prompt.md +7 -0
- devflow_engine/prompts/source_doc_eval/document/prompt.md +6 -0
- devflow_engine/prompts/source_doc_eval/targeted_mutation/prompt.md +9 -0
- devflow_engine/prompts/source_doc_mutation/domain_entities/prompt.md +6 -0
- devflow_engine/prompts/source_doc_mutation/product_brief/prompt.md +6 -0
- devflow_engine/prompts/source_doc_mutation/project_doc_coherence/prompt.md +7 -0
- devflow_engine/prompts/source_doc_mutation/project_doc_render/prompt.md +9 -0
- devflow_engine/prompts/source_doc_mutation/source_doc_coherence/prompt.md +5 -0
- devflow_engine/prompts/source_doc_mutation/source_doc_enrichment_coherence/prompt.md +6 -0
- devflow_engine/prompts/source_doc_mutation/user_workflows/prompt.md +6 -0
- devflow_engine/prompts/source_scope/doctrine/prompt.md +10 -0
- devflow_engine/prompts/ui_grounding/doctrine/prompt.md +7 -0
- devflow_engine/recovery/__init__.py +3 -0
- devflow_engine/recovery/dag.py +2609 -0
- devflow_engine/recovery/models.py +220 -0
- devflow_engine/refactor.py +93 -0
- devflow_engine/registry/__init__.py +1 -0
- devflow_engine/registry/cards.py +238 -0
- devflow_engine/registry/domain_normalize.py +60 -0
- devflow_engine/registry/effects.py +65 -0
- devflow_engine/registry/enforce_report.py +150 -0
- devflow_engine/registry/module_cards_classify.py +164 -0
- devflow_engine/registry/module_cards_draft.py +184 -0
- devflow_engine/registry/module_cards_gate.py +59 -0
- devflow_engine/registry/packages.py +347 -0
- devflow_engine/registry/pathways.py +323 -0
- devflow_engine/review/__init__.py +11 -0
- devflow_engine/review/dag.py +588 -0
- devflow_engine/review/review_story.py +67 -0
- devflow_engine/scope_idea/__init__.py +3 -0
- devflow_engine/scope_idea/agentic.py +39 -0
- devflow_engine/scope_idea/dag.py +1069 -0
- devflow_engine/scope_idea/models.py +175 -0
- devflow_engine/skills/builtins/devflow/queue_failure_investigation/SKILL.md +112 -0
- devflow_engine/skills/builtins/devflow/queue_idea_to_story/SKILL.md +120 -0
- devflow_engine/skills/builtins/devflow/queue_integration/SKILL.md +105 -0
- devflow_engine/skills/builtins/devflow/queue_recovery/SKILL.md +108 -0
- devflow_engine/skills/builtins/devflow/queue_runtime_core/SKILL.md +155 -0
- devflow_engine/skills/builtins/devflow/queue_story_implementation/SKILL.md +122 -0
- devflow_engine/skills/builtins/devin/idea_to_story_handoff/SKILL.md +120 -0
- devflow_engine/skills/builtins/devin/ideation/SKILL.md +168 -0
- devflow_engine/skills/builtins/devin/ideation/state-and-phrasing-reference.md +18 -0
- devflow_engine/skills/builtins/devin/insight/SKILL.md +22 -0
- devflow_engine/skills/registry.example.yaml +42 -0
- devflow_engine/source_doc_assumptions.py +291 -0
- devflow_engine/source_doc_mutation_dag.py +1606 -0
- devflow_engine/source_doc_mutation_eval.py +417 -0
- devflow_engine/source_doc_mutation_worker.py +25 -0
- devflow_engine/source_docs_schema.py +207 -0
- devflow_engine/source_docs_updater.py +309 -0
- devflow_engine/source_scope/__init__.py +15 -0
- devflow_engine/source_scope/agentic.py +45 -0
- devflow_engine/source_scope/dag.py +1626 -0
- devflow_engine/source_scope/models.py +177 -0
- devflow_engine/stores/__init__.py +0 -0
- devflow_engine/stores/execution_store.py +3534 -0
- devflow_engine/story/__init__.py +0 -0
- devflow_engine/story/contracts.py +160 -0
- devflow_engine/story/discovery.py +47 -0
- devflow_engine/story/evidence.py +118 -0
- devflow_engine/story/hashing.py +27 -0
- devflow_engine/story/implemented_queue_purge.py +148 -0
- devflow_engine/story/indexer.py +105 -0
- devflow_engine/story/io.py +20 -0
- devflow_engine/story/markdown_contracts.py +298 -0
- devflow_engine/story/reconciliation.py +408 -0
- devflow_engine/story/validate_stories.py +149 -0
- devflow_engine/story/validate_tests_story.py +512 -0
- devflow_engine/story/validation.py +133 -0
- devflow_engine/ui_grounding/__init__.py +11 -0
- devflow_engine/ui_grounding/agentic.py +31 -0
- devflow_engine/ui_grounding/dag.py +874 -0
- devflow_engine/ui_grounding/models.py +224 -0
- devflow_engine/ui_grounding/pencil_bridge.py +247 -0
- devflow_engine/vendor/__init__.py +0 -0
- devflow_engine/vendor/datalumina_genai/__init__.py +11 -0
- devflow_engine/vendor/datalumina_genai/core/__init__.py +0 -0
- devflow_engine/vendor/datalumina_genai/core/exceptions.py +9 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/__init__.py +0 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/agent.py +48 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/agent_streaming_node.py +26 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/base.py +89 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/concurrent.py +30 -0
- devflow_engine/vendor/datalumina_genai/core/nodes/router.py +69 -0
- devflow_engine/vendor/datalumina_genai/core/schema.py +72 -0
- devflow_engine/vendor/datalumina_genai/core/task.py +52 -0
- devflow_engine/vendor/datalumina_genai/core/validate.py +139 -0
- devflow_engine/vendor/datalumina_genai/core/workflow.py +200 -0
- devflow_engine/worker.py +1086 -0
- devflow_engine/worker_guard.py +233 -0
- devflow_engine-1.0.0.dist-info/METADATA +235 -0
- devflow_engine-1.0.0.dist-info/RECORD +393 -0
- devflow_engine-1.0.0.dist-info/WHEEL +4 -0
- devflow_engine-1.0.0.dist-info/entry_points.txt +3 -0
- devin/__init__.py +6 -0
- devin/dag.py +58 -0
- devin/dag_two_arm.py +138 -0
- devin/devin_chat_scenario_catalog.json +588 -0
- devin/devin_eval.py +677 -0
- devin/nodes/__init__.py +0 -0
- devin/nodes/ideation/__init__.py +0 -0
- devin/nodes/ideation/node.py +195 -0
- devin/nodes/ideation/playground.py +267 -0
- devin/nodes/ideation/prompt.md +65 -0
- devin/nodes/ideation/scenarios/continue_refinement.py +13 -0
- devin/nodes/ideation/scenarios/continue_refinement_evals.py +18 -0
- devin/nodes/ideation/scenarios/idea_fits_existing_patterns.py +17 -0
- devin/nodes/ideation/scenarios/idea_fits_existing_patterns_evals.py +16 -0
- devin/nodes/ideation/scenarios/large_idea_split.py +4 -0
- devin/nodes/ideation/scenarios/large_idea_split_evals.py +17 -0
- devin/nodes/ideation/scenarios/source_documentation_added.py +4 -0
- devin/nodes/ideation/scenarios/source_documentation_added_evals.py +16 -0
- devin/nodes/ideation/scenarios/user_says_create_it.py +30 -0
- devin/nodes/ideation/scenarios/user_says_create_it_evals.py +23 -0
- devin/nodes/ideation/scenarios/vague_idea.py +16 -0
- devin/nodes/ideation/scenarios/vague_idea_evals.py +47 -0
- devin/nodes/ideation/tools.json +312 -0
- devin/nodes/insight/__init__.py +0 -0
- devin/nodes/insight/node.py +49 -0
- devin/nodes/insight/playground.py +154 -0
- devin/nodes/insight/prompt.md +61 -0
- devin/nodes/insight/scenarios/architecture_pattern_query.py +15 -0
- devin/nodes/insight/scenarios/architecture_pattern_query_evals.py +25 -0
- devin/nodes/insight/scenarios/codebase_exploration.py +15 -0
- devin/nodes/insight/scenarios/codebase_exploration_evals.py +23 -0
- devin/nodes/insight/scenarios/devin_ideation_routing.py +19 -0
- devin/nodes/insight/scenarios/devin_ideation_routing_evals.py +39 -0
- devin/nodes/insight/scenarios/devin_insight_routing.py +20 -0
- devin/nodes/insight/scenarios/devin_insight_routing_evals.py +40 -0
- devin/nodes/insight/scenarios/operational_debugging.py +15 -0
- devin/nodes/insight/scenarios/operational_debugging_evals.py +23 -0
- devin/nodes/insight/scenarios/operational_question.py +9 -0
- devin/nodes/insight/scenarios/operational_question_evals.py +8 -0
- devin/nodes/insight/scenarios/queue_status.py +15 -0
- devin/nodes/insight/scenarios/queue_status_evals.py +23 -0
- devin/nodes/insight/scenarios/source_doc_explanation.py +14 -0
- devin/nodes/insight/scenarios/source_doc_explanation_evals.py +21 -0
- devin/nodes/insight/scenarios/worker_state_check.py +15 -0
- devin/nodes/insight/scenarios/worker_state_check_evals.py +22 -0
- devin/nodes/insight/tools.json +126 -0
- devin/nodes/intake/__init__.py +0 -0
- devin/nodes/intake/node.py +27 -0
- devin/nodes/intake/playground.py +47 -0
- devin/nodes/intake/prompt.md +12 -0
- devin/nodes/intake/scenarios/ideation_routing.py +4 -0
- devin/nodes/intake/scenarios/ideation_routing_evals.py +5 -0
- devin/nodes/intake/scenarios/insight_routing.py +4 -0
- devin/nodes/intake/scenarios/insight_routing_evals.py +5 -0
- devin/nodes/iterate/README.md +44 -0
- devin/nodes/iterate/__init__.py +1 -0
- devin/nodes/iterate/_archived_design_stages/01-objectives-requirements.md +112 -0
- devin/nodes/iterate/_archived_design_stages/02-evals.md +131 -0
- devin/nodes/iterate/_archived_design_stages/03-tools-and-boundaries.md +110 -0
- devin/nodes/iterate/_archived_design_stages/04-harness-and-playground.md +32 -0
- devin/nodes/iterate/_archived_design_stages/05-prompt-deferred.md +11 -0
- devin/nodes/iterate/_archived_design_stages/coder_agent_design/01-objectives-requirements.md +20 -0
- devin/nodes/iterate/_archived_design_stages/coder_agent_design/02-evals.md +8 -0
- devin/nodes/iterate/_archived_design_stages/coder_agent_design/03-tools-and-boundaries.md +14 -0
- devin/nodes/iterate/_archived_design_stages/coder_agent_design/04-harness-and-playground.md +12 -0
- devin/nodes/iterate/_archived_design_stages/framer_agent_design/01-objectives-requirements.md +20 -0
- devin/nodes/iterate/_archived_design_stages/framer_agent_design/02-evals.md +8 -0
- devin/nodes/iterate/_archived_design_stages/framer_agent_design/03-tools-and-boundaries.md +13 -0
- devin/nodes/iterate/_archived_design_stages/framer_agent_design/04-harness-and-playground.md +12 -0
- devin/nodes/iterate/_archived_design_stages/iterator_agent_design/01-objectives-requirements.md +25 -0
- devin/nodes/iterate/_archived_design_stages/iterator_agent_design/02-evals.md +9 -0
- devin/nodes/iterate/_archived_design_stages/iterator_agent_design/03-tools-and-boundaries.md +14 -0
- devin/nodes/iterate/_archived_design_stages/iterator_agent_design/04-harness-and-playground.md +12 -0
- devin/nodes/iterate/_archived_design_stages/observer_agent_design/01-objectives-requirements.md +20 -0
- devin/nodes/iterate/_archived_design_stages/observer_agent_design/02-evals.md +8 -0
- devin/nodes/iterate/_archived_design_stages/observer_agent_design/03-tools-and-boundaries.md +14 -0
- devin/nodes/iterate/_archived_design_stages/observer_agent_design/04-harness-and-playground.md +13 -0
- devin/nodes/iterate/agent-roles.md +89 -0
- devin/nodes/iterate/agents/README.md +10 -0
- devin/nodes/iterate/artifacts.md +504 -0
- devin/nodes/iterate/contract.md +100 -0
- devin/nodes/iterate/eval-plan.md +74 -0
- devin/nodes/iterate/node.py +100 -0
- devin/nodes/iterate/pipeline/README.md +13 -0
- devin/nodes/iterate/playground-contract.md +76 -0
- devin/nodes/iterate/prompt.md +11 -0
- devin/nodes/iterate/scenarios/README.md +38 -0
- devin/nodes/iterate/scenarios/artifact-and-loop-scenarios.md +101 -0
- devin/nodes/iterate/scenarios/coder_artifact_alignment.py +32 -0
- devin/nodes/iterate/scenarios/coder_artifact_alignment_evals.py +45 -0
- devin/nodes/iterate/scenarios/coder_bounded_fix.py +27 -0
- devin/nodes/iterate/scenarios/coder_bounded_fix_evals.py +45 -0
- devin/nodes/iterate/scenarios/devin_iterate_routing.py +21 -0
- devin/nodes/iterate/scenarios/devin_iterate_routing_evals.py +36 -0
- devin/nodes/iterate/scenarios/framer_scope_boundary.py +25 -0
- devin/nodes/iterate/scenarios/framer_scope_boundary_evals.py +57 -0
- devin/nodes/iterate/scenarios/framer_task_framing.py +25 -0
- devin/nodes/iterate/scenarios/framer_task_framing_evals.py +58 -0
- devin/nodes/iterate/scenarios/iterate_error_fix.py +21 -0
- devin/nodes/iterate/scenarios/iterate_error_fix_evals.py +39 -0
- devin/nodes/iterate/scenarios/iterate_quick_change.py +21 -0
- devin/nodes/iterate/scenarios/iterate_quick_change_evals.py +35 -0
- devin/nodes/iterate/scenarios/iterate_to_idea_promotion.py +23 -0
- devin/nodes/iterate/scenarios/iterate_to_idea_promotion_evals.py +53 -0
- devin/nodes/iterate/scenarios/iterate_to_insight_reroute.py +23 -0
- devin/nodes/iterate/scenarios/iterate_to_insight_reroute_evals.py +53 -0
- devin/nodes/iterate/scenarios/observer_evidence_seam.py +28 -0
- devin/nodes/iterate/scenarios/observer_evidence_seam_evals.py +55 -0
- devin/nodes/iterate/scenarios/observer_repro_creation.py +28 -0
- devin/nodes/iterate/scenarios/observer_repro_creation_evals.py +45 -0
- devin/nodes/iterate/scenarios/routing-matrix.md +45 -0
- devin/nodes/shared/__init__.py +0 -0
- devin/nodes/shared/filemaker_expert.md +80 -0
- devin/nodes/shared/filemaker_expert.py +354 -0
- devin/nodes/shared/filemaker_expert_eval/runner.py +176 -0
- devin/nodes/shared/filemaker_expert_eval/scenarios.json +65 -0
- devin/nodes/shared/goldilocks_advisor_eval/runner.py +214 -0
- devin/nodes/shared/goldilocks_advisor_eval/scenarios.json +58 -0
- devin/nodes/shared/helpers.py +156 -0
- devin/nodes/shared/idea_compliance_advisor_eval/runner.py +252 -0
- devin/nodes/shared/idea_compliance_advisor_eval/scenarios.json +75 -0
- devin/nodes/shared/models.py +44 -0
- devin/nodes/shared/post.py +40 -0
- devin/nodes/shared/router.py +107 -0
- devin/nodes/shared/tools.py +191 -0
- devin/shared/devin-chat-rubric.md +237 -0
- devin/shared/devin-chat-scenario-suite.md +90 -0
- devin/shared/eval_doctrine.md +9 -0
|
@@ -0,0 +1,1292 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import base64
|
|
5
|
+
import binascii
|
|
6
|
+
import io
|
|
7
|
+
import json
|
|
8
|
+
import logging
|
|
9
|
+
import os
|
|
10
|
+
import shutil
|
|
11
|
+
import subprocess
|
|
12
|
+
import threading
|
|
13
|
+
import time
|
|
14
|
+
import zlib
|
|
15
|
+
from contextlib import redirect_stderr, redirect_stdout, suppress
|
|
16
|
+
from dataclasses import dataclass
|
|
17
|
+
from datetime import UTC, datetime
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, Callable
|
|
20
|
+
from urllib.parse import quote
|
|
21
|
+
|
|
22
|
+
import typer
|
|
23
|
+
|
|
24
|
+
from .api_keys import (
|
|
25
|
+
DevflowTransportGrantStore,
|
|
26
|
+
bootstrap_provider_api_keys,
|
|
27
|
+
set_runtime_provider_api_key,
|
|
28
|
+
store_provider_api_key,
|
|
29
|
+
unwrap_api_key_event_payload,
|
|
30
|
+
)
|
|
31
|
+
from .devflow_state import _postgrest_request, _resolve_supabase_rest_config
|
|
32
|
+
|
|
33
|
+
logger = logging.getLogger(__name__)
|
|
34
|
+
from devin.dag_two_arm import run_devin_two_arm_dag as run_devin_chat_dag
|
|
35
|
+
from devflow_engine.idea.response_mode import current_response_mode_label
|
|
36
|
+
from devflow_engine.idea.traditional_stories import TraditionalStoryInsufficiencyError, generate_traditional_user_story_set
|
|
37
|
+
from .project_registry import normalize_github_selector, read_projects_registry
|
|
38
|
+
from .source_scope.dag import run_source_to_scope_dag
|
|
39
|
+
from .stores.execution_store import ExecutionStore
|
|
40
|
+
|
|
41
|
+
# Maps Supabase DevFlow event types to the internal workflow key used when
# dispatching. Events whose type is not listed here are skipped by the worker
# (see DevflowEventWorkerService.fetch_next_requested_event).
PRIMARY_DEVFLOW_EVENT_TYPES = {
    "devflow.project.init.request": "project_init",
    "devflow.project.import.request": "project_import",
    "devflow.project.repo_tree.request": "project_repo_tree",
    "devflow.project.source_docs.add.request": "source_docs_add",
    "devflow.source_scope.run.request": "source_scope_run",
    "devflow.idea.intake.request": "idea_intake",
    "devflow.idea.stories.generate.request": "idea_stories_generate",
    "devflow_settings_changed": "settings_changed",
    "devflow_API_KEY": "api_key",
}

# Substrings that indicate a resource was already registered/imported —
# presumably matched against CLI/error output so duplicate registration can
# be treated as non-fatal; confirm against the call sites.
_ALREADY_REGISTERED_TOKENS = (
    "already registered",
    "already exists",
    "duplicate",
    "already imported",
)

# Directory names excluded when walking a repository to build its tree view.
_REPO_TREE_EXCLUDED_DIR_NAMES = {".git", ".devflow", "node_modules", ".venv", "__pycache__"}
# Tokens that mark a settings key as secret-bearing (likely used for
# redaction of settings_changed payloads — verify against caller).
_SECRET_SETTING_TOKENS = ("key", "token", "secret", "password")
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dataclass(frozen=True)
class DevflowDispatchResult:
    """Immutable record describing one dispatched DevFlow event."""

    # Identifier of the Supabase event row that was dispatched.
    event_id: str
    # Primary key of the execution-run row created for this dispatch.
    execution_run_id: int
    # Internal workflow key resolved via PRIMARY_DEVFLOW_EVENT_TYPES.
    workflow_key: str
    # CLI command (argv list) built for the event.
    command: list[str]
    # Result payload produced by the dispatch.
    result: dict[str, Any]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@dataclass(frozen=True)
class DevflowEventWorkerLoopResult:
    """Summary counters for one pass of the event-worker loop."""

    # Number of events handled successfully during the pass.
    processed: int = 0
    # Number of events whose handling failed during the pass.
    failed: int = 0
    # Presumably True when no claimable event was found (so the loop may
    # back off) — confirm against the loop implementation.
    idle: bool = False
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@dataclass(frozen=True)
class _CliInvocationResult:
    """Captured outcome of a CLI invocation."""

    # Exit status of the invocation; 0 conventionally means success.
    returncode: int
    # Captured standard-output text.
    stdout: str
    # Captured standard-error text.
    stderr: str
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _extract_project_id_from_cli_output(output: str) -> str | None:
|
|
88
|
+
for line in output.splitlines():
|
|
89
|
+
if not line.lower().startswith("project_id:"):
|
|
90
|
+
continue
|
|
91
|
+
value = line.split(":", 1)[1].strip()
|
|
92
|
+
if value:
|
|
93
|
+
return value
|
|
94
|
+
return None
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _utcnow_iso() -> str:
|
|
98
|
+
return datetime.now(UTC).isoformat()
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def _project_metadata(project: dict[str, Any]) -> dict[str, Any]:
|
|
102
|
+
metadata = project.get("metadata")
|
|
103
|
+
if isinstance(metadata, dict):
|
|
104
|
+
return dict(metadata)
|
|
105
|
+
metadata = project.get("metadata_json")
|
|
106
|
+
if isinstance(metadata, dict):
|
|
107
|
+
return dict(metadata)
|
|
108
|
+
return {}
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def _normalize_planes(raw: Any) -> list[str]:
|
|
112
|
+
if isinstance(raw, str):
|
|
113
|
+
return [item.strip() for item in raw.split(",") if item.strip()]
|
|
114
|
+
if isinstance(raw, list):
|
|
115
|
+
return [str(item).strip() for item in raw if str(item).strip()]
|
|
116
|
+
return []
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _result_message(exc: TraditionalStoryInsufficiencyError) -> str:
    """Serialize a story-insufficiency failure into a deterministic JSON string.

    Pulls identifying paths and the pass/fail summary from the exception's
    attached report; ``sort_keys`` keeps the output stable for comparison.
    """
    details = exc.report
    payload = {
        "error": "traditional story set insufficient after refinement loop",
        "story_set_id": exc.story_set_id,
        "stories_dir": str(exc.root),
        "report_path": str(exc.report_path),
        "pass_count": details.get("pass_count"),
        "passed": details.get("passed"),
        "final_findings": details.get("final_findings"),
    }
    return json.dumps(payload, sort_keys=True)
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
class DevflowEventWorkerService:
|
|
136
|
+
    def __init__(self, *, worker_id: str = "devflow-engine-supabase-event-worker") -> None:
        """Create the worker and eagerly load provider API keys.

        Args:
            worker_id: Identifier recorded on events/runs claimed by this worker.
        """
        self.worker_id = worker_id
        # Load provider API keys up front so dispatches don't fail lazily.
        bootstrap_provider_api_keys()
|
|
139
|
+
|
|
140
|
+
def fetch_next_requested_event(self) -> dict[str, Any] | None:
|
|
141
|
+
events = self._fetch_candidate_events()
|
|
142
|
+
for event in events:
|
|
143
|
+
event_type = str(event.get("event_type") or "")
|
|
144
|
+
workflow_key = PRIMARY_DEVFLOW_EVENT_TYPES.get(event_type)
|
|
145
|
+
if workflow_key is None:
|
|
146
|
+
continue
|
|
147
|
+
claimed = self._claim_event(event_id=str(event["id"]), workflow_key=workflow_key)
|
|
148
|
+
if claimed is not None:
|
|
149
|
+
return claimed
|
|
150
|
+
return None
|
|
151
|
+
|
|
152
|
+
def dispatch_next_event(self) -> DevflowDispatchResult | None:
|
|
153
|
+
event = self.fetch_next_requested_event()
|
|
154
|
+
if event is None:
|
|
155
|
+
return None
|
|
156
|
+
project = self._fetch_project(project_id=str(event["project_id"]))
|
|
157
|
+
return self.dispatch_event(event=event, project=project)
|
|
158
|
+
|
|
159
|
+
    def dispatch_event(self, *, event: dict[str, Any], project: dict[str, Any]) -> DevflowDispatchResult:
        """Execute one DevFlow event end-to-end, mirroring progress into storage.

        Bookkeeping order matters and must not be reordered:
          1. insert an execution run + step row (status "processing"),
          2. mark the event row "processing" with the run id,
          3. build the display command, run the handler,
          4. on failure mark step/run/event failed and re-raise;
             on success persist the repo root, mark everything completed.

        Raises RuntimeError for unsupported event types; any handler
        exception propagates after the failure rows are written.
        """
        event_id = str(event["id"])
        workflow_key = PRIMARY_DEVFLOW_EVENT_TYPES.get(str(event.get("event_type") or ""))
        if workflow_key is None:
            raise RuntimeError(f"Unsupported DevFlow event type: {event.get('event_type')}")

        company_payload = self._company_payload(project=project, event=event)
        started_at = _utcnow_iso()
        # Placeholder until _build_command succeeds; failure rows still record it.
        command: list[str] = []
        run = self._insert_execution_run(
            {
                "event_id": event_id,
                "project_id": str(project["id"]),
                "workflow_key": workflow_key,
                "status": "processing",
                "worker_id": self.worker_id,
                "command": command,
                "company_payload": company_payload,
                "started_at": started_at,
                "last_heartbeat_at": started_at,
                "summary": f"Dispatching {workflow_key}",
            }
        )
        execution_run_id = int(run["id"])
        step = self._insert_execution_step_run(
            {
                "execution_run_id": execution_run_id,
                "step_name": "dispatch_devflow_engine",
                "status": "processing",
                "command": command,
                "started_at": started_at,
            }
        )
        step_id = int(step["id"])

        # Link the event row to the run before doing any real work.
        self._update_event(
            event_id=event_id,
            fields={
                "run_id": str(execution_run_id),
                "status": "processing",
                "stage": workflow_key,
                "error": None,
                "producer": self.worker_id,
            },
        )

        try:
            # The display command is persisted before execution so a crash
            # mid-handler still leaves an inspectable record.
            command = self._build_command(event=event, project=project)
            self._update_execution_step_run(step_id=step_id, fields={"command": command})
            self._update_execution_run(execution_run_id=execution_run_id, fields={"command": command})
            result = self._execute_event(event=event, project=project)
        except Exception as exc:
            # Mark step, run, and event failed, then re-raise for the caller.
            finished_at = _utcnow_iso()
            error_message = str(exc)
            self._update_execution_step_run(
                step_id=step_id,
                fields={
                    "status": "failed",
                    "finished_at": finished_at,
                    "error": error_message,
                    "result": {"error": error_message},
                },
            )
            self._update_execution_run(
                execution_run_id=execution_run_id,
                fields={
                    "status": "failed",
                    "finished_at": finished_at,
                    "last_heartbeat_at": finished_at,
                    "error": error_message,
                    "result": {"error": error_message},
                },
            )
            self._update_event(
                event_id=event_id,
                fields={
                    "status": "error",
                    "stage": workflow_key,
                    "error": error_message,
                    "producer": self.worker_id,
                    "payload": self._merge_event_payload(
                        event=event,
                        extra_payload={
                            "workflow_key": workflow_key,
                            "dispatch_command": command,
                            "execution_result": {"error": error_message},
                        },
                    ),
                },
            )
            raise

        finished_at = _utcnow_iso()
        # Handlers may report a discovered repo root; persist it on the project.
        local_repo_root = result.get("local_repo_root")
        if isinstance(local_repo_root, str) and local_repo_root.strip():
            self._update_project(
                project_id=str(project["id"]),
                fields={"devflow_repo_root": local_repo_root.strip()},
            )

        self._update_execution_step_run(
            step_id=step_id,
            fields={
                "status": "completed",
                "finished_at": finished_at,
                "result": result,
            },
        )
        self._update_execution_run(
            execution_run_id=execution_run_id,
            fields={
                "status": "completed",
                "finished_at": finished_at,
                "last_heartbeat_at": finished_at,
                "summary": f"Completed {workflow_key}" + (" (already registered)" if result.get("already_registered") else ""),
                "result": result,
                "error": None,
            },
        )
        self._update_event(
            event_id=event_id,
            fields={
                "status": "completed",
                "stage": workflow_key,
                "error": None,
                "producer": self.worker_id,
                "payload": self._merge_event_payload(
                    event=event,
                    extra_payload={
                        "workflow_key": workflow_key,
                        "dispatch_command": command,
                        "execution_result": result,
                    },
                ),
            },
        )

        return DevflowDispatchResult(
            event_id=event_id,
            execution_run_id=execution_run_id,
            workflow_key=workflow_key,
            command=command,
            result=result,
        )
|
|
303
|
+
|
|
304
|
+
def _execute_event(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
305
|
+
event_type = str(event.get("event_type") or "")
|
|
306
|
+
if event_type == "devflow.project.init.request":
|
|
307
|
+
return self._run_project_init_request(event=event, project=project)
|
|
308
|
+
if event_type == "devflow.project.import.request":
|
|
309
|
+
return self._run_project_import_request(event=event, project=project)
|
|
310
|
+
if event_type == "devflow.project.repo_tree.request":
|
|
311
|
+
return self._run_project_repo_tree_request(event=event, project=project)
|
|
312
|
+
if event_type == "devflow.project.source_docs.add.request":
|
|
313
|
+
return self._run_project_source_docs_add_request(event=event, project=project)
|
|
314
|
+
if event_type == "devflow.source_scope.run.request":
|
|
315
|
+
return self._run_source_scope_request(event=event, project=project)
|
|
316
|
+
if event_type == "devflow.idea.intake.request":
|
|
317
|
+
return self._run_idea_intake_request(event=event, project=project)
|
|
318
|
+
if event_type == "devflow.idea.stories.generate.request":
|
|
319
|
+
return self._run_idea_stories_generate_request(event=event, project=project)
|
|
320
|
+
if event_type == "devflow_settings_changed":
|
|
321
|
+
return self._run_settings_changed_request(event=event, project=project)
|
|
322
|
+
if event_type == "devflow_API_KEY":
|
|
323
|
+
return self._run_api_key_event(event=event, project=project)
|
|
324
|
+
raise RuntimeError(f"Unsupported DevFlow event type: {event_type}")
|
|
325
|
+
|
|
326
|
+
def _run_project_init_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
327
|
+
payload = dict(event.get("payload") or {})
|
|
328
|
+
repo_root = self._resolve_local_repo_root(project=project, event=event)
|
|
329
|
+
create_new = self._is_new_project_init_request(payload=payload)
|
|
330
|
+
if repo_root is None and not create_new:
|
|
331
|
+
raise RuntimeError("project init event requires an existing local_repo_root/workspace_path")
|
|
332
|
+
name = str(payload.get("name") or project.get("name") or "").strip()
|
|
333
|
+
if not name:
|
|
334
|
+
raise RuntimeError("project init event requires project name")
|
|
335
|
+
command_result = self._invoke_cli_command(
|
|
336
|
+
cwd=(repo_root or Path.cwd()),
|
|
337
|
+
func_name="project_init",
|
|
338
|
+
kwargs={
|
|
339
|
+
"name": name,
|
|
340
|
+
"new": create_new,
|
|
341
|
+
"template": payload.get("template"),
|
|
342
|
+
"project_type": payload.get("project_type"),
|
|
343
|
+
},
|
|
344
|
+
)
|
|
345
|
+
if command_result.returncode != 0:
|
|
346
|
+
raise RuntimeError(command_result.stderr or command_result.stdout or "project init failed")
|
|
347
|
+
if repo_root is None:
|
|
348
|
+
repo_root = self._resolve_new_project_repo_root(name=name)
|
|
349
|
+
return {
|
|
350
|
+
"returncode": 0,
|
|
351
|
+
"stdout": command_result.stdout,
|
|
352
|
+
"stderr": command_result.stderr,
|
|
353
|
+
"local_repo_root": str(repo_root),
|
|
354
|
+
}
|
|
355
|
+
|
|
356
|
+
def _run_project_import_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
357
|
+
payload = dict(event.get("payload") or {})
|
|
358
|
+
repo_spec = self._project_import_repo_spec(payload=payload, project=project)
|
|
359
|
+
authoritative_project_id = str(project.get("id") or "").strip() or str(event.get("project_id") or "").strip()
|
|
360
|
+
command_result = self._invoke_cli_command(
|
|
361
|
+
cwd=Path.cwd(),
|
|
362
|
+
func_name="project_import",
|
|
363
|
+
kwargs={
|
|
364
|
+
"repo_spec": repo_spec,
|
|
365
|
+
"init": payload.get("init"),
|
|
366
|
+
"supabase_project_id": authoritative_project_id or None,
|
|
367
|
+
},
|
|
368
|
+
)
|
|
369
|
+
combined_output = f"{command_result.stdout}\n{command_result.stderr}".lower()
|
|
370
|
+
already_registered = command_result.returncode != 0 and any(token in combined_output for token in _ALREADY_REGISTERED_TOKENS)
|
|
371
|
+
if command_result.returncode != 0 and not already_registered:
|
|
372
|
+
raise RuntimeError(command_result.stderr or command_result.stdout or "project import failed")
|
|
373
|
+
reported_project_id = _extract_project_id_from_cli_output(command_result.stdout)
|
|
374
|
+
if authoritative_project_id and reported_project_id and reported_project_id != authoritative_project_id:
|
|
375
|
+
raise RuntimeError(
|
|
376
|
+
"project import returned mismatched project_id "
|
|
377
|
+
f"(authoritative={authoritative_project_id}, reported={reported_project_id})"
|
|
378
|
+
)
|
|
379
|
+
local_repo_root = self._resolve_imported_repo_root(project=project, event=event, repo_spec=repo_spec, init_template=payload.get("init"))
|
|
380
|
+
return {
|
|
381
|
+
"returncode": 0 if already_registered else command_result.returncode,
|
|
382
|
+
"project_id": authoritative_project_id or reported_project_id,
|
|
383
|
+
"stdout": command_result.stdout,
|
|
384
|
+
"stderr": command_result.stderr,
|
|
385
|
+
"already_registered": already_registered,
|
|
386
|
+
"local_repo_root": None if local_repo_root is None else str(local_repo_root),
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
def _run_project_repo_tree_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
390
|
+
repo_root = self._require_repo_root(project=project, event=event)
|
|
391
|
+
entries: list[dict[str, str]] = []
|
|
392
|
+
for current_root, dirnames, filenames in os.walk(repo_root):
|
|
393
|
+
dirnames[:] = [name for name in dirnames if name not in _REPO_TREE_EXCLUDED_DIR_NAMES]
|
|
394
|
+
current_path = Path(current_root)
|
|
395
|
+
for filename in sorted(filenames):
|
|
396
|
+
path = (current_path / filename).resolve()
|
|
397
|
+
rel = path.relative_to(repo_root).as_posix()
|
|
398
|
+
entries.append({"path": rel, "type": "file"})
|
|
399
|
+
return {
|
|
400
|
+
"entry_count": len(entries),
|
|
401
|
+
"entries": entries,
|
|
402
|
+
"local_repo_root": str(repo_root),
|
|
403
|
+
}
|
|
404
|
+
|
|
405
|
+
    def _run_project_source_docs_add_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
        """Copy source documents into <repo>/ai_docs/context/source_docs.

        Two modes, selected by payload["source_mode"]:
          * "repo"       — copy files already inside the repo (payload["repo_paths"]),
          * "filesystem" — write uploaded files (payload["files"], each an object
            with "name" and base64 "content_base64", optionally gzip-compressed).

        Any added *.xml file is then fed to the external ddr-docs analyzer
        (best-effort: analyzer failures are recorded in the result, not raised).

        Raises RuntimeError on invalid mode, missing/invalid inputs, bad base64,
        or gzip decompression failure.
        """
        payload = dict(event.get("payload") or {})
        repo_root = self._require_repo_root(project=project, event=event)
        source_mode = str(payload.get("source_mode") or "").strip().lower()
        if source_mode not in {"filesystem", "repo"}:
            raise RuntimeError("source docs add event requires source_mode of 'filesystem' or 'repo'")

        source_docs_root = repo_root / "ai_docs" / "context" / "source_docs"
        source_docs_root.mkdir(parents=True, exist_ok=True)
        # Repo-relative POSIX paths of every file placed under source_docs_root.
        added_files: list[str] = []

        if source_mode == "repo":
            repo_paths = [str(item).strip() for item in (payload.get("repo_paths") or []) if str(item).strip()]
            if not repo_paths:
                raise RuntimeError("source docs add event requires repo_paths when source_mode=repo")
            for rel in repo_paths:
                source_path = self._resolve_repo_relative_path(base=repo_root, raw=rel)
                if not source_path.exists() or not source_path.is_file():
                    raise RuntimeError(f"Selected repo file does not exist: {rel}")
                destination_path = self._resolve_repo_relative_path(base=source_docs_root, raw=rel)
                if source_path == destination_path:
                    # Already inside source_docs; record it without copying.
                    added_files.append(destination_path.relative_to(repo_root).as_posix())
                    continue
                destination_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(source_path, destination_path)
                added_files.append(destination_path.relative_to(repo_root).as_posix())
        else:
            files = payload.get("files") or []
            if not isinstance(files, list) or not files:
                raise RuntimeError("source docs add event requires files when source_mode=filesystem")
            for item in files:
                if not isinstance(item, dict):
                    raise RuntimeError("source docs add event files entries must be objects")
                name = str(item.get("name") or "").strip()
                encoded = str(item.get("content_base64") or "")
                if not name or not encoded:
                    raise RuntimeError("source docs add event filesystem files require name and content_base64")
                try:
                    raw_bytes = base64.b64decode(encoded, validate=True)
                except (ValueError, binascii.Error) as exc:
                    raise RuntimeError(f"Invalid base64 payload for source doc file {name}") from exc
                # Decompress gzip if the file was compressed before upload
                compression = str(item.get("compression") or "").lower()
                if compression == "gzip":
                    try:
                        # 16 + MAX_WBITS tells zlib to expect a gzip header.
                        raw_bytes = zlib.decompress(raw_bytes, 16 + zlib.MAX_WBITS)
                    except Exception as exc:
                        raise RuntimeError(f"Failed to decompress gzip payload for {name}: {exc}") from exc
                # Path(name).name strips any client-supplied directory components.
                destination_path = self._resolve_repo_relative_path(base=source_docs_root, raw=Path(name).name)
                destination_path.parent.mkdir(parents=True, exist_ok=True)
                destination_path.write_bytes(raw_bytes)
                added_files.append(destination_path.relative_to(repo_root).as_posix())

        # After saving, check for DDR (FileMaker DDR XML) files and run ddr-docs for each
        ddr_results: list[dict[str, Any]] = []
        xml_files = [f for f in added_files if f.lower().endswith(".xml")]
        key_layout_name = payload.get("key_layout_layout_name")
        for xml_file in xml_files:
            ddr_input_path = repo_root / xml_file
            # Use the XML filename (without extension) as the output subdirectory name
            ddr_name = Path(xml_file).stem
            ddr_output_dir = repo_root / "ai_docs" / "context" / "source_docs" / "ddr" / ddr_name
            try:
                # HACK: absolute, machine-specific analyzer path — breaks on any
                # other host. TODO(review): make this configurable or resolve
                # ddr-docs from PATH.
                cmd = [
                    "/Users/devflow/repos/FM2Web/.agent-venv/bin/ddr-docs",
                    "analyze", "full-analysis",
                    "--input", str(ddr_input_path),
                    "--output-dir", str(ddr_output_dir),
                    "--journey-depth", "5",
                ]
                if key_layout_name:
                    cmd.extend(["--entry-point-layout", str(key_layout_name)])
                result = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=300,
                    env={**os.environ, "PATH": "/usr/local/bin:/usr/bin:/bin"},
                )
                if result.returncode == 0:
                    ddr_results.append({
                        "status": "success",
                        "input_file": xml_file,
                        "output_dir": str(ddr_output_dir),
                        "key_layout": key_layout_name or None,
                        # Only the tail of the output is kept to bound row size.
                        "stdout": result.stdout[-500:] if result.stdout else "",
                    })
                else:
                    ddr_results.append({
                        "status": "failed",
                        "input_file": xml_file,
                        "key_layout": key_layout_name or None,
                        "error": result.stderr[-500:] if result.stderr else result.stdout[-500:],
                    })
            except Exception as exc:
                # Analyzer problems (timeout, missing binary) are reported, not raised.
                ddr_results.append({
                    "status": "error",
                    "input_file": xml_file,
                    "key_layout": key_layout_name or None,
                    "error": str(exc),
                })

        result_data = {
            "source_mode": source_mode,
            "added_count": len(added_files),
            "added_files": added_files,
            "source_docs_root": str(source_docs_root),
            "local_repo_root": str(repo_root),
        }
        if ddr_results:
            # Single analysis is unwrapped for convenience; multiple stay a list.
            result_data["ddr_analysis"] = ddr_results if len(ddr_results) > 1 else ddr_results[0]
        return result_data
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
def _run_source_scope_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
520
|
+
payload = dict(event.get("payload") or {})
|
|
521
|
+
repo_root = self._require_repo_root(project=project, event=event)
|
|
522
|
+
intake_id = str(payload.get("intake_id") or "").strip()
|
|
523
|
+
if not intake_id:
|
|
524
|
+
raise RuntimeError("source-scope event requires intake_id")
|
|
525
|
+
source_refs: list[dict[str, Any]] = []
|
|
526
|
+
for raw in payload.get("sources") or []:
|
|
527
|
+
path = Path(str(raw)).expanduser()
|
|
528
|
+
source_refs.append({"type": "doc", "path": str(path), "title": path.name})
|
|
529
|
+
text = payload.get("text")
|
|
530
|
+
if text:
|
|
531
|
+
source_refs.append({"type": "notes", "title": f"{intake_id}.md", "text": str(text)})
|
|
532
|
+
if not source_refs:
|
|
533
|
+
raise RuntimeError("source-scope event requires sources or text")
|
|
534
|
+
store = self._store_for_repo(repo_root)
|
|
535
|
+
result = run_source_to_scope_dag(
|
|
536
|
+
repo_root=repo_root,
|
|
537
|
+
store=store,
|
|
538
|
+
project_id=str(project["id"]),
|
|
539
|
+
source_intake_id=intake_id,
|
|
540
|
+
source_refs=source_refs,
|
|
541
|
+
requested_by=str(payload.get("requested_by") or "marcus"),
|
|
542
|
+
)
|
|
543
|
+
if result.exit_code != 0:
|
|
544
|
+
raise RuntimeError(result.message or "source-scope run failed")
|
|
545
|
+
return {
|
|
546
|
+
"exit_code": result.exit_code,
|
|
547
|
+
"run_id": result.run_id,
|
|
548
|
+
"pipeline_dir": str(result.pipeline_dir),
|
|
549
|
+
"message": result.message,
|
|
550
|
+
"outcome": result.outcome,
|
|
551
|
+
"local_repo_root": str(repo_root),
|
|
552
|
+
}
|
|
553
|
+
|
|
554
|
+
def _run_idea_intake_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
555
|
+
payload = dict(event.get("payload") or {})
|
|
556
|
+
repo_root = self._require_repo_root(project=project, event=event)
|
|
557
|
+
idea_id = str(payload.get("idea_id") or event.get("idea_id") or "").strip()
|
|
558
|
+
if not idea_id:
|
|
559
|
+
raise RuntimeError("idea intake event requires idea_id")
|
|
560
|
+
source_path = payload.get("source_path")
|
|
561
|
+
store = self._store_for_repo(repo_root)
|
|
562
|
+
result = run_devin_chat_dag(
|
|
563
|
+
repo_root=repo_root,
|
|
564
|
+
store=store,
|
|
565
|
+
idea_id=idea_id,
|
|
566
|
+
text=(None if payload.get("text") is None else str(payload.get("text"))),
|
|
567
|
+
source_path=(Path(str(source_path)).expanduser().resolve() if source_path else None),
|
|
568
|
+
max_stories=int(payload.get("max_stories") or 0),
|
|
569
|
+
planes=_normalize_planes(payload.get("planes")),
|
|
570
|
+
response_mode_label=(
|
|
571
|
+
None
|
|
572
|
+
if payload.get("response_mode_label") is None and payload.get("response_mode") is None
|
|
573
|
+
else str(payload.get("response_mode_label") or payload.get("response_mode"))
|
|
574
|
+
),
|
|
575
|
+
)
|
|
576
|
+
if result.exit_code != 0:
|
|
577
|
+
raise RuntimeError(result.message or "idea intake failed")
|
|
578
|
+
return {
|
|
579
|
+
"exit_code": result.exit_code,
|
|
580
|
+
"run_id": result.run_id,
|
|
581
|
+
"pipeline_dir": str(result.pipeline_dir),
|
|
582
|
+
"message": result.message,
|
|
583
|
+
"outcome": result.outcome,
|
|
584
|
+
"local_repo_root": str(repo_root),
|
|
585
|
+
}
|
|
586
|
+
|
|
587
|
+
def _run_idea_stories_generate_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
588
|
+
payload = dict(event.get("payload") or {})
|
|
589
|
+
repo_root = self._require_repo_root(project=project, event=event)
|
|
590
|
+
idea_id = str(payload.get("idea_id") or event.get("idea_id") or "").strip()
|
|
591
|
+
if not idea_id:
|
|
592
|
+
raise RuntimeError("idea stories generate event requires idea_id")
|
|
593
|
+
try:
|
|
594
|
+
story_set = generate_traditional_user_story_set(
|
|
595
|
+
repo_root=repo_root,
|
|
596
|
+
idea_id=idea_id,
|
|
597
|
+
max_stories=int(payload.get("max_stories") or 0),
|
|
598
|
+
)
|
|
599
|
+
except FileNotFoundError as exc:
|
|
600
|
+
raise RuntimeError(f"Missing idea artifact for idea_id={idea_id}") from exc
|
|
601
|
+
except ValueError as exc:
|
|
602
|
+
raise RuntimeError(str(exc)) from exc
|
|
603
|
+
except TraditionalStoryInsufficiencyError as exc:
|
|
604
|
+
raise RuntimeError(_result_message(exc)) from exc
|
|
605
|
+
return {
|
|
606
|
+
"story_set_id": story_set.story_set_id,
|
|
607
|
+
"stories_dir": str(story_set.root),
|
|
608
|
+
"story_paths": [str(path) for path in story_set.story_paths],
|
|
609
|
+
"sufficiency_report": story_set.sufficiency_report,
|
|
610
|
+
"local_repo_root": str(repo_root),
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
def _run_settings_changed_request(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
614
|
+
payload = dict(event.get("payload") or {})
|
|
615
|
+
raw_settings = payload.get("settings")
|
|
616
|
+
if not isinstance(raw_settings, dict) or not raw_settings:
|
|
617
|
+
raise RuntimeError("devflow_settings_changed requires payload.settings object")
|
|
618
|
+
|
|
619
|
+
settings = dict(raw_settings)
|
|
620
|
+
applied_keys: list[str] = []
|
|
621
|
+
|
|
622
|
+
llm_mode = settings.pop("llm_mode", None)
|
|
623
|
+
llm_provider = settings.pop("llm_provider", None)
|
|
624
|
+
if llm_mode is not None or llm_provider is not None:
|
|
625
|
+
current_config = self._read_global_config()
|
|
626
|
+
resolved_mode = str(llm_mode if llm_mode is not None else current_config.get("llm_mode") or "").strip().lower()
|
|
627
|
+
resolved_provider = str(llm_provider if llm_provider is not None else current_config.get("llm_provider") or "").strip().lower()
|
|
628
|
+
if resolved_mode not in {"cli", "api"}:
|
|
629
|
+
raise RuntimeError("devflow_settings_changed requires llm_mode to be cli or api for provider updates")
|
|
630
|
+
if not resolved_provider:
|
|
631
|
+
raise RuntimeError("devflow_settings_changed requires llm_provider when updating llm_mode/provider")
|
|
632
|
+
command_result = self._invoke_cli_command(
|
|
633
|
+
cwd=Path.cwd(),
|
|
634
|
+
func_name="config_llm_set_provider",
|
|
635
|
+
kwargs={"mode": resolved_mode, "provider": resolved_provider},
|
|
636
|
+
)
|
|
637
|
+
if command_result.returncode != 0:
|
|
638
|
+
raise RuntimeError(command_result.stderr or command_result.stdout or "config llm-set-provider failed")
|
|
639
|
+
applied_keys.extend(["llm_mode", "llm_provider"])
|
|
640
|
+
|
|
641
|
+
for key, value in settings.items():
|
|
642
|
+
normalized_key = str(key).strip()
|
|
643
|
+
if not normalized_key:
|
|
644
|
+
continue
|
|
645
|
+
if self._is_secret_setting_key(normalized_key):
|
|
646
|
+
raise RuntimeError(
|
|
647
|
+
f"devflow_settings_changed cannot apply secret setting '{normalized_key}'; use devflow_API_KEY"
|
|
648
|
+
)
|
|
649
|
+
command_result = self._invoke_cli_command(
|
|
650
|
+
cwd=Path.cwd(),
|
|
651
|
+
func_name="config_set",
|
|
652
|
+
kwargs={"key": normalized_key, "value": self._stringify_setting_value(value), "project_id": None},
|
|
653
|
+
)
|
|
654
|
+
if command_result.returncode != 0:
|
|
655
|
+
raise RuntimeError(command_result.stderr or command_result.stdout or f"config set failed for {normalized_key}")
|
|
656
|
+
applied_keys.append(normalized_key)
|
|
657
|
+
|
|
658
|
+
return {
|
|
659
|
+
"applied_settings": applied_keys,
|
|
660
|
+
"config_scope": "global",
|
|
661
|
+
"local_repo_root": str(self._resolve_local_repo_root(project=project, event=event) or Path.cwd()),
|
|
662
|
+
}
|
|
663
|
+
|
|
664
|
+
def _run_api_key_event(self, *, event: dict[str, Any], project: dict[str, Any]) -> dict[str, Any]:
|
|
665
|
+
payload = dict(event.get("payload") or {})
|
|
666
|
+
grant_store = self._resolve_transport_grant_store()
|
|
667
|
+
try:
|
|
668
|
+
unwrapped = unwrap_api_key_event_payload(payload=payload, grant_store=grant_store)
|
|
669
|
+
finally:
|
|
670
|
+
grant_store.close()
|
|
671
|
+
|
|
672
|
+
provider = str(unwrapped["provider"] or "")
|
|
673
|
+
api_key = str(unwrapped["api_key"] or "")
|
|
674
|
+
tier = str(unwrapped["tier"] or "").strip() or None
|
|
675
|
+
|
|
676
|
+
store_provider_api_key(provider, api_key)
|
|
677
|
+
env_var = set_runtime_provider_api_key(provider, api_key)
|
|
678
|
+
|
|
679
|
+
return {
|
|
680
|
+
"provider": provider,
|
|
681
|
+
"tier": tier,
|
|
682
|
+
"env_var": env_var,
|
|
683
|
+
"storage": {
|
|
684
|
+
"runtime_env": True,
|
|
685
|
+
"macos_keychain": True,
|
|
686
|
+
},
|
|
687
|
+
"local_repo_root": str(self._resolve_local_repo_root(project=project, event=event) or Path.cwd()),
|
|
688
|
+
}
|
|
689
|
+
|
|
690
|
+
    def _build_command(self, *, event: dict[str, Any], project: dict[str, Any]) -> list[str]:
        """Build the human-readable CLI argv recorded for this event.

        This command is stored on the execution run/step for auditing; actual
        execution happens in _execute_event via in-process handlers, so the
        argv must mirror what each handler does. One branch per supported
        event_type; raises RuntimeError for anything else or for payloads
        missing required ids.
        """
        payload = dict(event.get("payload") or {})
        event_type = str(event.get("event_type") or "")
        if event_type == "devflow.project.init.request":
            command = ["devflow", "project", "init", "--name", str(payload.get("name") or project.get("name") or "")]
            if self._is_new_project_init_request(payload=payload):
                command.append("--new")
            if payload.get("template"):
                command.extend(["--template", str(payload["template"])])
            if payload.get("project_type"):
                command.extend(["--project-type", str(payload["project_type"])])
            return command
        if event_type == "devflow.project.import.request":
            repo_spec = self._project_import_repo_spec(payload=payload, project=project)
            command = ["devflow", "project", "import", repo_spec]
            if payload.get("init"):
                command.extend(["--init", str(payload["init"])])
            return command
        if event_type == "devflow.project.repo_tree.request":
            repo_root = self._require_repo_root(project=project, event=event)
            return ["devflow-event-worker", "project-repo-tree", "--repo", str(repo_root)]
        if event_type == "devflow.project.source_docs.add.request":
            repo_root = self._require_repo_root(project=project, event=event)
            command = ["devflow-event-worker", "source-docs-add", "--repo", str(repo_root), "--source-mode", str(payload.get("source_mode") or "")]
            for repo_path in payload.get("repo_paths") or []:
                command.extend(["--repo-path", str(repo_path)])
            for file_name in payload.get("file_names") or []:
                command.extend(["--file", str(file_name)])
            return command
        if event_type == "devflow.source_scope.run.request":
            intake_id = payload.get("intake_id")
            if not intake_id:
                raise RuntimeError("source-scope event requires intake_id")
            command = ["devflow", "source-scope", "run", "--project-id", str(project["id"]), "--intake-id", str(intake_id)]
            for source in payload.get("sources") or []:
                command.extend(["--source", str(source)])
            if payload.get("text"):
                command.extend(["--text", str(payload["text"])])
            if payload.get("requested_by"):
                command.extend(["--requested-by", str(payload["requested_by"])])
            return command
        if event_type == "devflow.idea.intake.request":
            # idea_id may live in the payload or at the top level of the event.
            idea_id = payload.get("idea_id") or event.get("idea_id")
            if not idea_id:
                raise RuntimeError("idea intake event requires idea_id")
            command = ["devflow", "idea", "intake", "--idea", str(idea_id)]
            if payload.get("text"):
                command.extend(["--text", str(payload["text"])])
            if payload.get("source_path"):
                command.extend(["--from", str(payload["source_path"])])
            if payload.get("max_stories") is not None:
                command.extend(["--max-stories", str(payload["max_stories"])])
            if payload.get("planes"):
                command.extend(["--planes", ",".join(_normalize_planes(payload.get("planes")))])
            # --response-mode is always present, falling back to the current label.
            command.extend(["--response-mode", str(payload.get("response_mode_label") or current_response_mode_label())])
            return command
        if event_type == "devflow.idea.stories.generate.request":
            idea_id = payload.get("idea_id") or event.get("idea_id")
            if not idea_id:
                raise RuntimeError("idea stories generate event requires idea_id")
            command = ["devflow", "idea", "stories", "generate", "--idea", str(idea_id)]
            if payload.get("max_stories") is not None:
                command.extend(["--max-stories", str(payload["max_stories"])])
            return command
        if event_type == "devflow_settings_changed":
            return ["devflow-event-worker", "settings-changed", "--event-type", "devflow_settings_changed"]
        if event_type == "devflow_API_KEY":
            return ["devflow-event-worker", "api-key", "--event-type", "devflow_API_KEY"]
        raise RuntimeError(f"Unsupported DevFlow event type: {event_type}")
|
|
759
|
+
|
|
760
|
+
def _company_payload(self, *, project: dict[str, Any], event: dict[str, Any]) -> dict[str, Any]:
|
|
761
|
+
payload = dict(event.get("payload") or {})
|
|
762
|
+
metadata = _project_metadata(project)
|
|
763
|
+
company_payload = payload.get("company_payload")
|
|
764
|
+
if isinstance(company_payload, dict):
|
|
765
|
+
return company_payload
|
|
766
|
+
if isinstance(metadata.get("company_payload"), dict):
|
|
767
|
+
return dict(metadata["company_payload"])
|
|
768
|
+
return {
|
|
769
|
+
"organization_id": project.get("organization_id"),
|
|
770
|
+
"customer_id": project.get("customer_id"),
|
|
771
|
+
"project_name": project.get("name"),
|
|
772
|
+
"environment": project.get("environment"),
|
|
773
|
+
}
|
|
774
|
+
|
|
775
|
+
def _require_repo_root(self, *, project: dict[str, Any], event: dict[str, Any]) -> Path:
|
|
776
|
+
repo_root = self._resolve_local_repo_root(project=project, event=event)
|
|
777
|
+
if repo_root is None:
|
|
778
|
+
raise RuntimeError(f"Unable to resolve local repo root for project {project.get('id')}")
|
|
779
|
+
return repo_root
|
|
780
|
+
|
|
781
|
+
def _resolve_local_repo_root(self, *, project: dict[str, Any], event: dict[str, Any]) -> Path | None:
|
|
782
|
+
payload = dict(event.get("payload") or {})
|
|
783
|
+
for candidate in (
|
|
784
|
+
payload.get("local_repo_root"),
|
|
785
|
+
payload.get("workspace_path"),
|
|
786
|
+
project.get("devflow_repo_root"),
|
|
787
|
+
):
|
|
788
|
+
path = self._coerce_existing_path(candidate)
|
|
789
|
+
if path is not None:
|
|
790
|
+
return path
|
|
791
|
+
|
|
792
|
+
repo_url = project.get("repo_url") or payload.get("repo_spec")
|
|
793
|
+
normalized_repo_url = normalize_github_selector(str(repo_url)) if isinstance(repo_url, str) else None
|
|
794
|
+
registry = read_projects_registry()
|
|
795
|
+
for item in registry.get("projects", []):
|
|
796
|
+
if not isinstance(item, dict):
|
|
797
|
+
continue
|
|
798
|
+
item_remote = str(item.get("remote_url") or "").strip()
|
|
799
|
+
if repo_url and item_remote and item_remote in {str(repo_url), str(normalized_repo_url or "")}:
|
|
800
|
+
path = self._coerce_existing_path(item.get("repo_root") or item.get("workspace_path"))
|
|
801
|
+
if path is not None:
|
|
802
|
+
return path
|
|
803
|
+
if str(item.get("project_id") or "").strip() == str(project.get("id") or "").strip():
|
|
804
|
+
path = self._coerce_existing_path(item.get("repo_root") or item.get("workspace_path"))
|
|
805
|
+
if path is not None:
|
|
806
|
+
return path
|
|
807
|
+
return None
|
|
808
|
+
|
|
809
|
+
def _resolve_imported_repo_root(self, *, project: dict[str, Any], event: dict[str, Any], repo_spec: str, init_template: Any) -> Path | None:
|
|
810
|
+
if init_template:
|
|
811
|
+
return Path(repo_spec).expanduser().resolve()
|
|
812
|
+
path = self._coerce_existing_path(project.get("devflow_repo_root"))
|
|
813
|
+
if path is not None:
|
|
814
|
+
return path
|
|
815
|
+
path = self._resolve_local_repo_root(project=project, event=event)
|
|
816
|
+
if path is not None:
|
|
817
|
+
return path
|
|
818
|
+
spec_path = self._coerce_existing_path(repo_spec)
|
|
819
|
+
if spec_path is not None:
|
|
820
|
+
return spec_path
|
|
821
|
+
return None
|
|
822
|
+
|
|
823
|
+
def _project_import_repo_spec(self, *, payload: dict[str, Any], project: dict[str, Any]) -> str:
|
|
824
|
+
raw_repo_spec = payload.get("repo_spec") or project.get("repo_url") or project.get("devflow_repo_root")
|
|
825
|
+
if not isinstance(raw_repo_spec, str) or not raw_repo_spec.strip():
|
|
826
|
+
raise RuntimeError("project import event requires repo_spec or project repo_url")
|
|
827
|
+
repo_spec = raw_repo_spec.strip()
|
|
828
|
+
if payload.get("init"):
|
|
829
|
+
return repo_spec
|
|
830
|
+
if normalize_github_selector(repo_spec) is not None:
|
|
831
|
+
return repo_spec
|
|
832
|
+
trimmed_repo_spec = repo_spec.strip("/")
|
|
833
|
+
if trimmed_repo_spec and trimmed_repo_spec != repo_spec and normalize_github_selector(trimmed_repo_spec) is not None:
|
|
834
|
+
return trimmed_repo_spec
|
|
835
|
+
return repo_spec
|
|
836
|
+
|
|
837
|
+
def _is_new_project_init_request(self, *, payload: dict[str, Any]) -> bool:
|
|
838
|
+
mode = str(payload.get("init_mode") or payload.get("mode") or "").strip().lower()
|
|
839
|
+
if mode in {"new", "create"}:
|
|
840
|
+
return True
|
|
841
|
+
if bool(payload.get("new")) or bool(payload.get("create")) or bool(payload.get("create_project")):
|
|
842
|
+
return True
|
|
843
|
+
return bool(payload.get("template") or payload.get("project_type"))
|
|
844
|
+
|
|
845
|
+
def _resolve_new_project_repo_root(self, *, name: str) -> Path:
    """Ask the CLI layer for the canonical repo root of a new project *name*."""
    # Imported lazily to avoid a circular import with the CLI module.
    from .cli.app import resolve_canonical_repo_root

    return resolve_canonical_repo_root(name, None)
|
|
849
|
+
|
|
850
|
+
def _coerce_existing_path(self, raw: Any) -> Path | None:
|
|
851
|
+
if not isinstance(raw, str) or not raw.strip():
|
|
852
|
+
return None
|
|
853
|
+
path = Path(raw).expanduser().resolve()
|
|
854
|
+
if path.exists():
|
|
855
|
+
return path
|
|
856
|
+
return None
|
|
857
|
+
|
|
858
|
+
def _resolve_repo_relative_path(self, *, base: Path, raw: str) -> Path:
|
|
859
|
+
candidate = (base / raw).resolve()
|
|
860
|
+
try:
|
|
861
|
+
candidate.relative_to(base.resolve())
|
|
862
|
+
except ValueError as exc:
|
|
863
|
+
raise RuntimeError(f"Path escapes allowed root: {raw}") from exc
|
|
864
|
+
return candidate
|
|
865
|
+
|
|
866
|
+
def _store_for_repo(self, repo_root: Path) -> ExecutionStore:
    """Open (creating the directory if needed) the per-repo execution store."""
    root = repo_root.expanduser().resolve()
    devflow_dir = root / ".devflow"
    devflow_dir.mkdir(parents=True, exist_ok=True)
    return ExecutionStore(devflow_dir / "execution.sqlite")
|
|
870
|
+
|
|
871
|
+
def _read_global_config(self) -> dict[str, Any]:
    """Load the user-global devflow config.toml via the CLI helpers."""
    # Imported lazily to avoid a circular import with the CLI module.
    from .cli import app as cli_app

    config_path = cli_app._global_devflow_dir() / "config.toml"
    return cli_app._read_toml(config_path)
|
|
875
|
+
|
|
876
|
+
def _is_secret_setting_key(self, key: str) -> bool:
    """True when *key* contains one of the known secret-bearing tokens."""
    normalized = key.strip().lower()
    for token in _SECRET_SETTING_TOKENS:
        if token in normalized:
            return True
    return False
|
|
879
|
+
|
|
880
|
+
def _stringify_setting_value(self, value: Any) -> str:
|
|
881
|
+
if isinstance(value, bool):
|
|
882
|
+
return "true" if value else "false"
|
|
883
|
+
if value is None:
|
|
884
|
+
return ""
|
|
885
|
+
return str(value)
|
|
886
|
+
|
|
887
|
+
def _resolve_transport_grant_store(self) -> DevflowTransportGrantStore:
    """Build the transport grant store from environment configuration."""
    return DevflowTransportGrantStore.from_env()
|
|
889
|
+
|
|
890
|
+
def _invoke_cli_command(self, *, cwd: Path, func_name: str, kwargs: dict[str, Any]) -> _CliInvocationResult:
    """Run a CLI command function in-process, capturing output and exit code.

    The process working directory is switched to *cwd* for the call and
    restored afterwards. NOTE(review): os.chdir is process-global, so
    concurrent threads would observe the temporary cwd — confirm callers
    are single-threaded.
    """
    # Imported lazily to avoid a circular import with the CLI module.
    from .cli import app as cli_app

    command = getattr(cli_app, func_name)
    captured_out = io.StringIO()
    captured_err = io.StringIO()
    original_cwd = Path.cwd()
    try:
        os.chdir(cwd)
        with redirect_stdout(captured_out), redirect_stderr(captured_err):
            try:
                command(**kwargs)
                exit_code = 0
            except typer.Exit as exc:
                # typer.Exit(None) and typer.Exit(0) both count as success.
                exit_code = 0 if exc.exit_code in (None, 0) else int(exc.exit_code)
    finally:
        os.chdir(original_cwd)
    return _CliInvocationResult(
        returncode=exit_code,
        stdout=captured_out.getvalue(),
        stderr=captured_err.getvalue(),
    )
|
|
912
|
+
|
|
913
|
+
def _fetch_candidate_events(self) -> list[dict[str, Any]]:
    """Fetch up to 25 unclaimed queued events, oldest first."""
    url, key = self._require_supabase_config()
    response = _postgrest_request(
        method="GET",
        url=f"{url}/rest/v1/devflow_execution_events?select=*&status=eq.queued&run_id=is.null&order=occurred_at.asc&limit=25",
        key=key,
    )
    if not isinstance(response, list):
        return []
    # Copy each row dict so callers can mutate results safely.
    return [dict(item) for item in response if isinstance(item, dict)]
|
|
923
|
+
|
|
924
|
+
def _claim_event(self, *, event_id: str, workflow_key: str) -> dict[str, Any] | None:
    """Atomically claim a queued event by flipping its status to processing.

    The status/run_id filters in the URL make the PATCH a compare-and-swap:
    if another worker claimed the event first, zero rows come back and
    None is returned.
    """
    url, key = self._require_supabase_config()
    response = _postgrest_request(
        method="PATCH",
        url=f"{url}/rest/v1/devflow_execution_events?id=eq.{quote(event_id)}&status=eq.queued&run_id=is.null",
        key=key,
        body={
            "status": "processing",
            "stage": workflow_key,
            "error": None,
        },
        prefer="return=representation",
    )
    if not isinstance(response, list) or not response:
        return None
    claimed = response[0]
    return dict(claimed) if isinstance(claimed, dict) else None
|
|
943
|
+
|
|
944
|
+
def _fetch_project(self, *, project_id: str) -> dict[str, Any]:
    """Load a single devflow_projects row by id, raising when absent."""
    url, key = self._require_supabase_config()
    response = _postgrest_request(
        method="GET",
        url=f"{url}/rest/v1/devflow_projects?select=*&id=eq.{quote(project_id)}&limit=1",
        key=key,
    )
    if isinstance(response, list) and response and isinstance(response[0], dict):
        return dict(response[0])
    raise RuntimeError(f"DevFlow project {project_id} not found")
|
|
954
|
+
|
|
955
|
+
def _insert_execution_run(self, payload: dict[str, Any]) -> dict[str, Any]:
    """Insert a devflow_execution_runs row and return the created record."""
    url, key = self._require_supabase_config()
    response = _postgrest_request(
        method="POST",
        url=f"{url}/rest/v1/devflow_execution_runs",
        key=key,
        body=payload,
        prefer="return=representation",
    )
    if isinstance(response, list) and response and isinstance(response[0], dict):
        return dict(response[0])
    raise RuntimeError("Failed to insert devflow_execution_runs row")
|
|
967
|
+
|
|
968
|
+
def _insert_execution_step_run(self, payload: dict[str, Any]) -> dict[str, Any]:
    """Insert a devflow_execution_step_runs row and return the created record."""
    url, key = self._require_supabase_config()
    response = _postgrest_request(
        method="POST",
        url=f"{url}/rest/v1/devflow_execution_step_runs",
        key=key,
        body=payload,
        prefer="return=representation",
    )
    if isinstance(response, list) and response and isinstance(response[0], dict):
        return dict(response[0])
    raise RuntimeError("Failed to insert devflow_execution_step_runs row")
|
|
980
|
+
|
|
981
|
+
def _update_execution_run(self, *, execution_run_id: int, fields: dict[str, Any]) -> None:
    """Patch the given devflow_execution_runs row with *fields*."""
    url, key = self._require_supabase_config()
    endpoint = f"{url}/rest/v1/devflow_execution_runs?id=eq.{execution_run_id}"
    _postgrest_request(method="PATCH", url=endpoint, key=key, body=fields)
|
|
989
|
+
|
|
990
|
+
def _update_execution_step_run(self, *, step_id: int, fields: dict[str, Any]) -> None:
    """Patch the given devflow_execution_step_runs row with *fields*."""
    url, key = self._require_supabase_config()
    endpoint = f"{url}/rest/v1/devflow_execution_step_runs?id=eq.{step_id}"
    _postgrest_request(method="PATCH", url=endpoint, key=key, body=fields)
|
|
998
|
+
|
|
999
|
+
def _update_event(self, *, event_id: str, fields: dict[str, Any]) -> None:
    """Patch the given devflow_execution_events row with *fields*."""
    url, key = self._require_supabase_config()
    endpoint = f"{url}/rest/v1/devflow_execution_events?id=eq.{quote(event_id)}"
    _postgrest_request(method="PATCH", url=endpoint, key=key, body=fields)
|
|
1007
|
+
|
|
1008
|
+
def _update_project(self, *, project_id: str, fields: dict[str, Any]) -> None:
    """Patch the given devflow_projects row with *fields*."""
    url, key = self._require_supabase_config()
    endpoint = f"{url}/rest/v1/devflow_projects?id=eq.{quote(project_id)}"
    _postgrest_request(method="PATCH", url=endpoint, key=key, body=fields)
|
|
1016
|
+
|
|
1017
|
+
def _merge_event_payload(self, *, event: dict[str, Any], extra_payload: dict[str, Any]) -> dict[str, Any]:
|
|
1018
|
+
merged = dict(event.get("payload") or {})
|
|
1019
|
+
merged.update(extra_payload)
|
|
1020
|
+
return merged
|
|
1021
|
+
|
|
1022
|
+
def _require_supabase_config(self) -> tuple[str, str]:
    """Return the (url, key) Supabase REST pair, raising when unconfigured."""
    resolved = _resolve_supabase_rest_config()
    if resolved is not None:
        return resolved
    raise RuntimeError("Supabase REST config unavailable for DevFlow event worker")
|
|
1027
|
+
|
|
1028
|
+
|
|
1029
|
+
class SupabaseRealtimeListener:
    """Supabase Realtime WebSocket listener for a single table's INSERT events.

    Uses the Phoenix channel protocol (Supabase Realtime v1/v2 wire format).
    Runs in a background daemon thread. When an INSERT is received on the
    subscribed table, the ``on_event`` callback is invoked from the listener
    thread.

    If the connection cannot be established or drops, the listener retries
    after a back-off delay without raising — the caller should treat
    ``is_connected`` as informational only and always maintain a polling
    fallback.

    Usage::

        listener = SupabaseRealtimeListener(
            url="https://...supabase.co",
            key="service-role-key",
            table="devflow_execution_events",
            on_event=my_callback,
        )
        listener.start()
        ...
        listener.stop()
    """

    # Phoenix heartbeat cadence; Supabase drops sockets idle for ~60 s.
    _HEARTBEAT_INTERVAL_S: float = 25.0
    _RECONNECT_DELAY_S: float = 5.0
    _RECV_TIMEOUT_S: float = 1.0

    def __init__(
        self,
        *,
        url: str,
        key: str,
        table: str = "devflow_execution_events",
        on_event: Callable[[], None],
    ) -> None:
        self._url = url.rstrip("/")
        self._key = key
        self._table = table
        self._on_event = on_event
        self._stop_event = threading.Event()
        self._thread: threading.Thread | None = None
        self._connected = False
        self._ref_counter = 0

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------

    def start(self) -> None:
        """Start the listener in a background daemon thread (idempotent while alive)."""
        if self._thread is not None and self._thread.is_alive():
            return
        self._stop_event.clear()
        self._thread = threading.Thread(target=self._run, daemon=True, name="supabase-realtime-listener")
        self._thread.start()

    def stop(self, timeout: float = 5.0) -> None:
        """Signal the listener to stop and wait up to *timeout* seconds for the thread to exit."""
        self._stop_event.set()
        if self._thread is not None:
            self._thread.join(timeout=timeout)

    @property
    def is_connected(self) -> bool:
        """Informational only: True while a WebSocket session is established."""
        return self._connected

    # ------------------------------------------------------------------
    # Internal
    # ------------------------------------------------------------------

    def _next_ref(self) -> str:
        """Return the next Phoenix message ref as a string (monotonically increasing)."""
        self._ref_counter += 1
        return str(self._ref_counter)

    def _ws_url(self) -> str:
        """Derive the Realtime WebSocket endpoint from the REST base URL."""
        base = self._url
        if base.startswith("https://"):
            base = "wss://" + base[8:]
        elif base.startswith("http://"):
            base = "ws://" + base[7:]
        return f"{base}/realtime/v1/websocket?apikey={self._key}&vsn=1.0.0"

    def _run(self) -> None:
        """Thread entry-point: runs the asyncio event loop."""
        asyncio.run(self._listen_loop())

    async def _listen_loop(self) -> None:
        """Outer reconnect loop — keeps retrying until stop() is called."""
        try:
            import websockets  # type: ignore[import-untyped]
        except ImportError:
            logger.warning("devflow-realtime: websockets package not available; falling back to polling-only mode")
            return

        ws_url = self._ws_url()
        while not self._stop_event.is_set():
            try:
                async with websockets.connect(
                    ws_url,
                    open_timeout=10,
                    close_timeout=5,
                    additional_headers={"apikey": self._key, "Authorization": f"Bearer {self._key}"},
                ) as ws:
                    self._connected = True
                    logger.debug("devflow-realtime: connected to %s", ws_url)
                    await self._session(ws)
            except Exception as exc:
                logger.debug("devflow-realtime: connection error: %s", exc)
            finally:
                self._connected = False

            if not self._stop_event.is_set():
                # Back off before reconnecting. Sleep in short slices and poll
                # the stop flag so stop() takes effect promptly instead of
                # waiting out the full delay (threading.Event cannot be awaited).
                remaining = self._RECONNECT_DELAY_S
                while remaining > 0 and not self._stop_event.is_set():
                    step = min(0.25, remaining)
                    await asyncio.sleep(step)
                    remaining -= step

    async def _session(self, ws: Any) -> None:
        """Single WebSocket session: join channel, handle events, send heartbeats."""
        channel_topic = f"realtime:public:{self._table}"
        join_ref = self._next_ref()

        join_msg = json.dumps(
            {
                "topic": channel_topic,
                "event": "phx_join",
                "payload": {
                    "config": {
                        "broadcast": {"ack": False, "self": False},
                        "presence": {"key": ""},
                        "postgres_changes": [
                            {"event": "INSERT", "schema": "public", "table": self._table}
                        ],
                    }
                },
                "ref": join_ref,
                "join_ref": join_ref,
            }
        )
        await ws.send(join_msg)

        heartbeat_task = asyncio.create_task(self._heartbeat(ws))
        try:
            await self._receive_loop(ws)
        finally:
            heartbeat_task.cancel()
            with suppress(Exception):
                await heartbeat_task

    async def _heartbeat(self, ws: Any) -> None:
        """Send Phoenix heartbeat messages every _HEARTBEAT_INTERVAL_S seconds."""
        while True:
            await asyncio.sleep(self._HEARTBEAT_INTERVAL_S)
            if self._stop_event.is_set():
                break
            try:
                hb = json.dumps({"topic": "phoenix", "event": "heartbeat", "payload": {}, "ref": self._next_ref()})
                await ws.send(hb)
            except Exception:
                # Socket is gone; the receive loop will notice and reconnect.
                break

    async def _receive_loop(self, ws: Any) -> None:
        """Read messages from the WebSocket and dispatch callbacks."""
        while not self._stop_event.is_set():
            try:
                raw = await asyncio.wait_for(ws.recv(), timeout=self._RECV_TIMEOUT_S)
            except asyncio.TimeoutError:
                # No message within the poll window; re-check the stop flag.
                continue
            except Exception:
                # Connection dropped — exit so the outer loop can reconnect.
                return

            try:
                msg = json.loads(raw)
            except Exception:
                # Ignore non-JSON frames.
                continue

            event = msg.get("event")
            if event == "postgres_changes":
                # Supabase Realtime v2: payload.data.type == "INSERT"
                data = (msg.get("payload") or {}).get("data") or {}
                change_type = str(data.get("type") or "").upper()
                if change_type == "INSERT":
                    logger.debug("devflow-realtime: INSERT on %s — waking dispatch loop", self._table)
                    try:
                        self._on_event()
                    except Exception:
                        # Callback failures must not kill the listener thread.
                        pass
            elif event == "phx_error":
                logger.debug("devflow-realtime: received phx_error — reconnecting")
                return
            elif event == "system":
                # Supabase Realtime v2 system messages (subscription confirmations etc.)
                status = str((msg.get("payload") or {}).get("status") or "").lower()
                if status == "error":
                    logger.debug("devflow-realtime: system error — reconnecting")
                    return
|
|
1227
|
+
|
|
1228
|
+
|
|
1229
|
+
class DevflowEventWorkerLoopService:
|
|
1230
|
+
def __init__(
    self,
    *,
    service: DevflowEventWorkerService | None = None,
    sleep_seconds: float = 3.0,
    max_iterations: int | None = None,
    use_realtime: bool = True,
) -> None:
    """Configure the dispatch loop.

    A fresh DevflowEventWorkerService is constructed when *service* is
    falsy. *sleep_seconds* bounds the idle wait between polls;
    *max_iterations* (when set) caps run_forever; *use_realtime* gates
    the Supabase Realtime wake-up listener.
    """
    self.service = service or DevflowEventWorkerService()
    self.sleep_seconds = sleep_seconds
    self.max_iterations = max_iterations
    self.use_realtime = use_realtime
    # Event set by the realtime listener to wake the dispatch loop immediately.
    self._wakeup = threading.Event()
|
|
1244
|
+
|
|
1245
|
+
def _on_realtime_event(self) -> None:
|
|
1246
|
+
"""Called from the realtime listener thread when a new INSERT arrives."""
|
|
1247
|
+
self._wakeup.set()
|
|
1248
|
+
|
|
1249
|
+
def _build_realtime_listener(self) -> SupabaseRealtimeListener | None:
    """Create a realtime listener, or None when disabled or unconfigured."""
    if not self.use_realtime:
        return None
    rest_config = _resolve_supabase_rest_config()
    if rest_config is None:
        return None
    supabase_url, supabase_key = rest_config
    return SupabaseRealtimeListener(url=supabase_url, key=supabase_key, on_event=self._on_realtime_event)
|
|
1258
|
+
|
|
1259
|
+
def run_once(self) -> DevflowEventWorkerLoopResult:
    """Dispatch at most one event; report processed/failed/idle for this tick."""
    try:
        dispatched = self.service.dispatch_next_event()
    except Exception:
        # A dispatch crash counts as one failure; the loop decides how to back off.
        return DevflowEventWorkerLoopResult(failed=1, idle=False)
    if dispatched is None:
        return DevflowEventWorkerLoopResult(idle=True)
    return DevflowEventWorkerLoopResult(processed=1, idle=False)
|
|
1267
|
+
|
|
1268
|
+
def run_forever(self) -> DevflowEventWorkerLoopResult:
    """Dispatch events until max_iterations (if set) is reached.

    Sleeps between polls when idle, waking early when the realtime
    listener reports a new INSERT. Also backs off after a failed
    dispatch: previously a persistently failing event (idle=False,
    failed=1 every tick) spun this loop with no delay, hammering the
    backend in a hot busy-wait.

    Returns the aggregated DevflowEventWorkerLoopResult when the
    iteration cap is hit (runs indefinitely otherwise).
    """
    processed = 0
    failed = 0
    iterations = 0

    listener = self._build_realtime_listener()
    if listener is not None:
        listener.start()
        logger.debug("devflow-realtime: listener started (realtime-enabled loop)")

    try:
        while True:
            outcome = self.run_once()
            iterations += 1
            processed += outcome.processed
            failed += outcome.failed
            if self.max_iterations is not None and iterations >= self.max_iterations:
                return DevflowEventWorkerLoopResult(processed=processed, failed=failed, idle=outcome.idle)
            if outcome.idle or outcome.failed:
                # Wait up to sleep_seconds, but wake early if realtime delivers an event.
                self._wakeup.wait(timeout=self.sleep_seconds)
                self._wakeup.clear()
    finally:
        if listener is not None:
            listener.stop()
|