hexdag 0.5.0.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hexdag/__init__.py +116 -0
- hexdag/__main__.py +30 -0
- hexdag/adapters/executors/__init__.py +5 -0
- hexdag/adapters/executors/local_executor.py +316 -0
- hexdag/builtin/__init__.py +6 -0
- hexdag/builtin/adapters/__init__.py +51 -0
- hexdag/builtin/adapters/anthropic/__init__.py +5 -0
- hexdag/builtin/adapters/anthropic/anthropic_adapter.py +151 -0
- hexdag/builtin/adapters/database/__init__.py +6 -0
- hexdag/builtin/adapters/database/csv/csv_adapter.py +249 -0
- hexdag/builtin/adapters/database/pgvector/__init__.py +5 -0
- hexdag/builtin/adapters/database/pgvector/pgvector_adapter.py +478 -0
- hexdag/builtin/adapters/database/sqlalchemy/sqlalchemy_adapter.py +252 -0
- hexdag/builtin/adapters/database/sqlite/__init__.py +5 -0
- hexdag/builtin/adapters/database/sqlite/sqlite_adapter.py +410 -0
- hexdag/builtin/adapters/local/README.md +59 -0
- hexdag/builtin/adapters/local/__init__.py +7 -0
- hexdag/builtin/adapters/local/local_observer_manager.py +696 -0
- hexdag/builtin/adapters/memory/__init__.py +47 -0
- hexdag/builtin/adapters/memory/file_memory_adapter.py +297 -0
- hexdag/builtin/adapters/memory/in_memory_memory.py +216 -0
- hexdag/builtin/adapters/memory/schemas.py +57 -0
- hexdag/builtin/adapters/memory/session_memory.py +178 -0
- hexdag/builtin/adapters/memory/sqlite_memory_adapter.py +215 -0
- hexdag/builtin/adapters/memory/state_memory.py +280 -0
- hexdag/builtin/adapters/mock/README.md +89 -0
- hexdag/builtin/adapters/mock/__init__.py +15 -0
- hexdag/builtin/adapters/mock/hexdag.toml +50 -0
- hexdag/builtin/adapters/mock/mock_database.py +225 -0
- hexdag/builtin/adapters/mock/mock_embedding.py +223 -0
- hexdag/builtin/adapters/mock/mock_llm.py +177 -0
- hexdag/builtin/adapters/mock/mock_tool_adapter.py +192 -0
- hexdag/builtin/adapters/mock/mock_tool_router.py +232 -0
- hexdag/builtin/adapters/openai/__init__.py +5 -0
- hexdag/builtin/adapters/openai/openai_adapter.py +634 -0
- hexdag/builtin/adapters/secret/__init__.py +7 -0
- hexdag/builtin/adapters/secret/local_secret_adapter.py +248 -0
- hexdag/builtin/adapters/unified_tool_router.py +280 -0
- hexdag/builtin/macros/__init__.py +17 -0
- hexdag/builtin/macros/conversation_agent.py +390 -0
- hexdag/builtin/macros/llm_macro.py +151 -0
- hexdag/builtin/macros/reasoning_agent.py +423 -0
- hexdag/builtin/macros/tool_macro.py +380 -0
- hexdag/builtin/nodes/__init__.py +38 -0
- hexdag/builtin/nodes/_discovery.py +123 -0
- hexdag/builtin/nodes/agent_node.py +696 -0
- hexdag/builtin/nodes/base_node_factory.py +242 -0
- hexdag/builtin/nodes/composite_node.py +926 -0
- hexdag/builtin/nodes/data_node.py +201 -0
- hexdag/builtin/nodes/expression_node.py +487 -0
- hexdag/builtin/nodes/function_node.py +454 -0
- hexdag/builtin/nodes/llm_node.py +491 -0
- hexdag/builtin/nodes/loop_node.py +920 -0
- hexdag/builtin/nodes/mapped_input.py +518 -0
- hexdag/builtin/nodes/port_call_node.py +269 -0
- hexdag/builtin/nodes/tool_call_node.py +195 -0
- hexdag/builtin/nodes/tool_utils.py +390 -0
- hexdag/builtin/prompts/__init__.py +68 -0
- hexdag/builtin/prompts/base.py +422 -0
- hexdag/builtin/prompts/chat_prompts.py +303 -0
- hexdag/builtin/prompts/error_correction_prompts.py +320 -0
- hexdag/builtin/prompts/tool_prompts.py +160 -0
- hexdag/builtin/tools/builtin_tools.py +84 -0
- hexdag/builtin/tools/database_tools.py +164 -0
- hexdag/cli/__init__.py +17 -0
- hexdag/cli/__main__.py +7 -0
- hexdag/cli/commands/__init__.py +27 -0
- hexdag/cli/commands/build_cmd.py +812 -0
- hexdag/cli/commands/create_cmd.py +208 -0
- hexdag/cli/commands/docs_cmd.py +293 -0
- hexdag/cli/commands/generate_types_cmd.py +252 -0
- hexdag/cli/commands/init_cmd.py +188 -0
- hexdag/cli/commands/pipeline_cmd.py +494 -0
- hexdag/cli/commands/plugin_dev_cmd.py +529 -0
- hexdag/cli/commands/plugins_cmd.py +441 -0
- hexdag/cli/commands/studio_cmd.py +101 -0
- hexdag/cli/commands/validate_cmd.py +221 -0
- hexdag/cli/main.py +84 -0
- hexdag/core/__init__.py +83 -0
- hexdag/core/config/__init__.py +20 -0
- hexdag/core/config/loader.py +479 -0
- hexdag/core/config/models.py +150 -0
- hexdag/core/configurable.py +294 -0
- hexdag/core/context/__init__.py +37 -0
- hexdag/core/context/execution_context.py +378 -0
- hexdag/core/docs/__init__.py +26 -0
- hexdag/core/docs/extractors.py +678 -0
- hexdag/core/docs/generators.py +890 -0
- hexdag/core/docs/models.py +120 -0
- hexdag/core/domain/__init__.py +10 -0
- hexdag/core/domain/dag.py +1225 -0
- hexdag/core/exceptions.py +234 -0
- hexdag/core/expression_parser.py +569 -0
- hexdag/core/logging.py +449 -0
- hexdag/core/models/__init__.py +17 -0
- hexdag/core/models/base.py +138 -0
- hexdag/core/orchestration/__init__.py +46 -0
- hexdag/core/orchestration/body_executor.py +481 -0
- hexdag/core/orchestration/components/__init__.py +97 -0
- hexdag/core/orchestration/components/adapter_lifecycle_manager.py +113 -0
- hexdag/core/orchestration/components/checkpoint_manager.py +134 -0
- hexdag/core/orchestration/components/execution_coordinator.py +360 -0
- hexdag/core/orchestration/components/health_check_manager.py +176 -0
- hexdag/core/orchestration/components/input_mapper.py +143 -0
- hexdag/core/orchestration/components/lifecycle_manager.py +583 -0
- hexdag/core/orchestration/components/node_executor.py +377 -0
- hexdag/core/orchestration/components/secret_manager.py +202 -0
- hexdag/core/orchestration/components/wave_executor.py +158 -0
- hexdag/core/orchestration/constants.py +17 -0
- hexdag/core/orchestration/events/README.md +312 -0
- hexdag/core/orchestration/events/__init__.py +104 -0
- hexdag/core/orchestration/events/batching.py +330 -0
- hexdag/core/orchestration/events/decorators.py +139 -0
- hexdag/core/orchestration/events/events.py +573 -0
- hexdag/core/orchestration/events/observers/__init__.py +30 -0
- hexdag/core/orchestration/events/observers/core_observers.py +690 -0
- hexdag/core/orchestration/events/observers/models.py +111 -0
- hexdag/core/orchestration/events/taxonomy.py +269 -0
- hexdag/core/orchestration/hook_context.py +237 -0
- hexdag/core/orchestration/hooks.py +437 -0
- hexdag/core/orchestration/models.py +418 -0
- hexdag/core/orchestration/orchestrator.py +910 -0
- hexdag/core/orchestration/orchestrator_factory.py +275 -0
- hexdag/core/orchestration/port_wrappers.py +327 -0
- hexdag/core/orchestration/prompt/__init__.py +32 -0
- hexdag/core/orchestration/prompt/template.py +332 -0
- hexdag/core/pipeline_builder/__init__.py +21 -0
- hexdag/core/pipeline_builder/component_instantiator.py +386 -0
- hexdag/core/pipeline_builder/include_tag.py +265 -0
- hexdag/core/pipeline_builder/pipeline_config.py +133 -0
- hexdag/core/pipeline_builder/py_tag.py +223 -0
- hexdag/core/pipeline_builder/tag_discovery.py +268 -0
- hexdag/core/pipeline_builder/yaml_builder.py +1196 -0
- hexdag/core/pipeline_builder/yaml_validator.py +569 -0
- hexdag/core/ports/__init__.py +65 -0
- hexdag/core/ports/api_call.py +133 -0
- hexdag/core/ports/database.py +489 -0
- hexdag/core/ports/embedding.py +215 -0
- hexdag/core/ports/executor.py +237 -0
- hexdag/core/ports/file_storage.py +117 -0
- hexdag/core/ports/healthcheck.py +87 -0
- hexdag/core/ports/llm.py +551 -0
- hexdag/core/ports/memory.py +70 -0
- hexdag/core/ports/observer_manager.py +130 -0
- hexdag/core/ports/secret.py +145 -0
- hexdag/core/ports/tool_router.py +94 -0
- hexdag/core/ports_builder.py +623 -0
- hexdag/core/protocols.py +273 -0
- hexdag/core/resolver.py +304 -0
- hexdag/core/schema/__init__.py +9 -0
- hexdag/core/schema/generator.py +742 -0
- hexdag/core/secrets.py +242 -0
- hexdag/core/types.py +413 -0
- hexdag/core/utils/async_warnings.py +206 -0
- hexdag/core/utils/schema_conversion.py +78 -0
- hexdag/core/utils/sql_validation.py +86 -0
- hexdag/core/validation/secure_json.py +148 -0
- hexdag/core/yaml_macro.py +517 -0
- hexdag/mcp_server.py +3120 -0
- hexdag/studio/__init__.py +10 -0
- hexdag/studio/build_ui.py +92 -0
- hexdag/studio/server/__init__.py +1 -0
- hexdag/studio/server/main.py +100 -0
- hexdag/studio/server/routes/__init__.py +9 -0
- hexdag/studio/server/routes/execute.py +208 -0
- hexdag/studio/server/routes/export.py +558 -0
- hexdag/studio/server/routes/files.py +207 -0
- hexdag/studio/server/routes/plugins.py +419 -0
- hexdag/studio/server/routes/validate.py +220 -0
- hexdag/studio/ui/index.html +13 -0
- hexdag/studio/ui/package-lock.json +2992 -0
- hexdag/studio/ui/package.json +31 -0
- hexdag/studio/ui/postcss.config.js +6 -0
- hexdag/studio/ui/public/hexdag.svg +5 -0
- hexdag/studio/ui/src/App.tsx +251 -0
- hexdag/studio/ui/src/components/Canvas.tsx +408 -0
- hexdag/studio/ui/src/components/ContextMenu.tsx +187 -0
- hexdag/studio/ui/src/components/FileBrowser.tsx +123 -0
- hexdag/studio/ui/src/components/Header.tsx +181 -0
- hexdag/studio/ui/src/components/HexdagNode.tsx +193 -0
- hexdag/studio/ui/src/components/NodeInspector.tsx +512 -0
- hexdag/studio/ui/src/components/NodePalette.tsx +262 -0
- hexdag/studio/ui/src/components/NodePortsSection.tsx +403 -0
- hexdag/studio/ui/src/components/PluginManager.tsx +347 -0
- hexdag/studio/ui/src/components/PortsEditor.tsx +481 -0
- hexdag/studio/ui/src/components/PythonEditor.tsx +195 -0
- hexdag/studio/ui/src/components/ValidationPanel.tsx +105 -0
- hexdag/studio/ui/src/components/YamlEditor.tsx +196 -0
- hexdag/studio/ui/src/components/index.ts +8 -0
- hexdag/studio/ui/src/index.css +92 -0
- hexdag/studio/ui/src/main.tsx +10 -0
- hexdag/studio/ui/src/types/index.ts +123 -0
- hexdag/studio/ui/src/vite-env.d.ts +1 -0
- hexdag/studio/ui/tailwind.config.js +29 -0
- hexdag/studio/ui/tsconfig.json +37 -0
- hexdag/studio/ui/tsconfig.node.json +13 -0
- hexdag/studio/ui/vite.config.ts +35 -0
- hexdag/visualization/__init__.py +69 -0
- hexdag/visualization/dag_visualizer.py +1020 -0
- hexdag-0.5.0.dev1.dist-info/METADATA +369 -0
- hexdag-0.5.0.dev1.dist-info/RECORD +261 -0
- hexdag-0.5.0.dev1.dist-info/WHEEL +4 -0
- hexdag-0.5.0.dev1.dist-info/entry_points.txt +4 -0
- hexdag-0.5.0.dev1.dist-info/licenses/LICENSE +190 -0
- hexdag_plugins/.gitignore +43 -0
- hexdag_plugins/README.md +73 -0
- hexdag_plugins/__init__.py +1 -0
- hexdag_plugins/azure/LICENSE +21 -0
- hexdag_plugins/azure/README.md +414 -0
- hexdag_plugins/azure/__init__.py +21 -0
- hexdag_plugins/azure/azure_blob_adapter.py +450 -0
- hexdag_plugins/azure/azure_cosmos_adapter.py +383 -0
- hexdag_plugins/azure/azure_keyvault_adapter.py +314 -0
- hexdag_plugins/azure/azure_openai_adapter.py +415 -0
- hexdag_plugins/azure/pyproject.toml +107 -0
- hexdag_plugins/azure/tests/__init__.py +1 -0
- hexdag_plugins/azure/tests/test_azure_blob_adapter.py +350 -0
- hexdag_plugins/azure/tests/test_azure_cosmos_adapter.py +323 -0
- hexdag_plugins/azure/tests/test_azure_keyvault_adapter.py +330 -0
- hexdag_plugins/azure/tests/test_azure_openai_adapter.py +329 -0
- hexdag_plugins/hexdag_etl/README.md +168 -0
- hexdag_plugins/hexdag_etl/__init__.py +53 -0
- hexdag_plugins/hexdag_etl/examples/01_simple_pandas_transform.py +270 -0
- hexdag_plugins/hexdag_etl/examples/02_simple_pandas_only.py +149 -0
- hexdag_plugins/hexdag_etl/examples/03_file_io_pipeline.py +109 -0
- hexdag_plugins/hexdag_etl/examples/test_pandas_transform.py +84 -0
- hexdag_plugins/hexdag_etl/hexdag.toml +25 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/__init__.py +48 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/__init__.py +13 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/api_extract.py +230 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/base_node_factory.py +181 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/file_io.py +415 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/outlook.py +492 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/pandas_transform.py +563 -0
- hexdag_plugins/hexdag_etl/hexdag_etl/nodes/sql_extract_load.py +112 -0
- hexdag_plugins/hexdag_etl/pyproject.toml +82 -0
- hexdag_plugins/hexdag_etl/test_transform.py +54 -0
- hexdag_plugins/hexdag_etl/tests/test_plugin_integration.py +62 -0
- hexdag_plugins/mysql_adapter/LICENSE +21 -0
- hexdag_plugins/mysql_adapter/README.md +224 -0
- hexdag_plugins/mysql_adapter/__init__.py +6 -0
- hexdag_plugins/mysql_adapter/mysql_adapter.py +408 -0
- hexdag_plugins/mysql_adapter/pyproject.toml +93 -0
- hexdag_plugins/mysql_adapter/tests/test_mysql_adapter.py +259 -0
- hexdag_plugins/storage/README.md +184 -0
- hexdag_plugins/storage/__init__.py +19 -0
- hexdag_plugins/storage/file/__init__.py +5 -0
- hexdag_plugins/storage/file/local.py +325 -0
- hexdag_plugins/storage/ports/__init__.py +5 -0
- hexdag_plugins/storage/ports/vector_store.py +236 -0
- hexdag_plugins/storage/sql/__init__.py +7 -0
- hexdag_plugins/storage/sql/base.py +187 -0
- hexdag_plugins/storage/sql/mysql.py +27 -0
- hexdag_plugins/storage/sql/postgresql.py +27 -0
- hexdag_plugins/storage/tests/__init__.py +1 -0
- hexdag_plugins/storage/tests/test_local_file_storage.py +161 -0
- hexdag_plugins/storage/tests/test_sql_adapters.py +212 -0
- hexdag_plugins/storage/vector/__init__.py +7 -0
- hexdag_plugins/storage/vector/chromadb.py +223 -0
- hexdag_plugins/storage/vector/in_memory.py +285 -0
- hexdag_plugins/storage/vector/pgvector.py +502 -0
|
@@ -0,0 +1,926 @@
|
|
|
1
|
+
"""CompositeNode - Unified control flow node for hexDAG.
|
|
2
|
+
|
|
3
|
+
This module provides a single unified node for all control flow patterns:
|
|
4
|
+
- while: Condition-based loop
|
|
5
|
+
- for-each: Collection iteration
|
|
6
|
+
- times: Fixed count iteration
|
|
7
|
+
- if-else: Single condition branch
|
|
8
|
+
- switch: Multi-branch conditions
|
|
9
|
+
|
|
10
|
+
All modes support two execution patterns:
|
|
11
|
+
- Inline body: When `body` or `body_pipeline` specified → execute within node
|
|
12
|
+
- Yield to downstream: When no body → yield state to downstream nodes
|
|
13
|
+
|
|
14
|
+
Examples
|
|
15
|
+
--------
|
|
16
|
+
YAML usage - while loop with inline body::
|
|
17
|
+
|
|
18
|
+
- kind: composite_node
|
|
19
|
+
metadata:
|
|
20
|
+
name: retry_loop
|
|
21
|
+
spec:
|
|
22
|
+
mode: while
|
|
23
|
+
condition: "state.attempts < 3 and not state.success"
|
|
24
|
+
initial_state:
|
|
25
|
+
attempts: 0
|
|
26
|
+
success: false
|
|
27
|
+
body: "myapp.attempt_operation"
|
|
28
|
+
collect: last
|
|
29
|
+
|
|
30
|
+
YAML usage - for-each with inline nodes::
|
|
31
|
+
|
|
32
|
+
- kind: composite_node
|
|
33
|
+
metadata:
|
|
34
|
+
name: process_items
|
|
35
|
+
spec:
|
|
36
|
+
mode: for-each
|
|
37
|
+
items: "$input.items"
|
|
38
|
+
concurrency: 5
|
|
39
|
+
body:
|
|
40
|
+
- kind: expression_node
|
|
41
|
+
spec:
|
|
42
|
+
expressions:
|
|
43
|
+
result: "$item * 2"
|
|
44
|
+
|
|
45
|
+
YAML usage - switch for routing (no body)::
|
|
46
|
+
|
|
47
|
+
- kind: composite_node
|
|
48
|
+
metadata:
|
|
49
|
+
name: router
|
|
50
|
+
spec:
|
|
51
|
+
mode: switch
|
|
52
|
+
branches:
|
|
53
|
+
- condition: "status == 'urgent'"
|
|
54
|
+
action: "urgent_path"
|
|
55
|
+
else_action: "default_path"
|
|
56
|
+
"""
|
|
57
|
+
|
|
58
|
+
import asyncio
|
|
59
|
+
import time
|
|
60
|
+
from collections.abc import Callable
|
|
61
|
+
from pathlib import Path
|
|
62
|
+
from typing import Any, Literal
|
|
63
|
+
|
|
64
|
+
from hexdag.builtin.nodes.base_node_factory import BaseNodeFactory
|
|
65
|
+
from hexdag.core.domain.dag import NodeSpec
|
|
66
|
+
from hexdag.core.expression_parser import compile_expression, evaluate_expression
|
|
67
|
+
from hexdag.core.logging import get_logger
|
|
68
|
+
from hexdag.core.orchestration.body_executor import BodyExecutor
|
|
69
|
+
from hexdag.core.orchestration.models import NodeExecutionContext
|
|
70
|
+
|
|
71
|
+
logger = get_logger(__name__)
|
|
72
|
+
|
|
73
|
+
# Type aliases used throughout this module's factory signature.
# Control-flow mode selector: which execution pattern the node implements.
Mode = Literal["while", "for-each", "times", "if-else", "switch"]
# How per-iteration results are aggregated into the node's output.
CollectMode = Literal["list", "last", "first", "dict", "reduce"]
# Strategy when a body iteration raises: abort, skip, or record the error.
ErrorHandling = Literal["fail_fast", "continue", "collect"]
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class CompositeNode(BaseNodeFactory):
|
|
80
|
+
"""Unified control flow node supporting while, for-each, times, if-else, switch.
|
|
81
|
+
|
|
82
|
+
All modes support two execution patterns:
|
|
83
|
+
- Inline body: When body/body_pipeline specified → execute within node
|
|
84
|
+
- Yield to downstream: When no body → yield state to dependent nodes
|
|
85
|
+
|
|
86
|
+
The YAML schema for this node is auto-generated from the ``__call__`` signature
|
|
87
|
+
and docstrings using ``SchemaGenerator``.
|
|
88
|
+
|
|
89
|
+
See Also
|
|
90
|
+
--------
|
|
91
|
+
LoopNode : Deprecated, use CompositeNode with mode='while'
|
|
92
|
+
ConditionalNode : Deprecated, use CompositeNode with mode='switch'
|
|
93
|
+
"""
|
|
94
|
+
|
|
95
|
+
# Schema is auto-generated from __call__ signature by SchemaGenerator
|
|
96
|
+
|
|
97
|
+
def __init__(self, base_path: Path | None = None) -> None:
|
|
98
|
+
"""Initialize CompositeNode factory.
|
|
99
|
+
|
|
100
|
+
Parameters
|
|
101
|
+
----------
|
|
102
|
+
base_path : Path | None
|
|
103
|
+
Base directory for resolving pipeline references
|
|
104
|
+
"""
|
|
105
|
+
super().__init__()
|
|
106
|
+
self.base_path = base_path or Path.cwd()
|
|
107
|
+
|
|
108
|
+
def __call__(
|
|
109
|
+
self,
|
|
110
|
+
name: str,
|
|
111
|
+
mode: Mode,
|
|
112
|
+
# Body specification (optional - if omitted, yields to downstream)
|
|
113
|
+
body: str | list[dict[str, Any]] | Callable[..., Any] | None = None,
|
|
114
|
+
body_pipeline: str | None = None,
|
|
115
|
+
# Mode-specific params
|
|
116
|
+
condition: str | None = None,
|
|
117
|
+
items: str | None = None,
|
|
118
|
+
item_var: str = "item",
|
|
119
|
+
index_var: str = "index",
|
|
120
|
+
count: int | None = None,
|
|
121
|
+
branches: list[dict[str, Any]] | None = None,
|
|
122
|
+
else_body: str | list[dict[str, Any]] | None = None,
|
|
123
|
+
else_action: str | None = None,
|
|
124
|
+
# Loop state management
|
|
125
|
+
initial_state: dict[str, Any] | None = None,
|
|
126
|
+
state_update: dict[str, str] | None = None,
|
|
127
|
+
max_iterations: int = 100,
|
|
128
|
+
# Concurrency (for-each, times only)
|
|
129
|
+
concurrency: int = 1,
|
|
130
|
+
# Result collection
|
|
131
|
+
collect: CollectMode = "list",
|
|
132
|
+
key_field: str | None = None,
|
|
133
|
+
reducer: str | None = None,
|
|
134
|
+
# Error handling
|
|
135
|
+
error_handling: ErrorHandling = "fail_fast",
|
|
136
|
+
# Sub-orchestrator configuration (for inline nodes and pipelines)
|
|
137
|
+
max_concurrent_nodes: int = 10,
|
|
138
|
+
strict_validation: bool = False,
|
|
139
|
+
default_node_timeout: float | None = None,
|
|
140
|
+
# Dependencies
|
|
141
|
+
deps: list[str] | None = None,
|
|
142
|
+
input_mapping: dict[str, str] | None = None,
|
|
143
|
+
**kwargs: Any,
|
|
144
|
+
) -> NodeSpec:
|
|
145
|
+
"""Create a CompositeNode NodeSpec.
|
|
146
|
+
|
|
147
|
+
Parameters
|
|
148
|
+
----------
|
|
149
|
+
name : str
|
|
150
|
+
Node name (unique identifier in the pipeline)
|
|
151
|
+
mode : Mode
|
|
152
|
+
Control flow mode: while, for-each, times, if-else, switch
|
|
153
|
+
body : str | list[dict] | Callable | None
|
|
154
|
+
Body to execute. Can be:
|
|
155
|
+
- Module path string (e.g., "myapp.process")
|
|
156
|
+
- Callable (compiled from !py tag)
|
|
157
|
+
- List of node configs (inline nodes/sub-DAG)
|
|
158
|
+
- None for yield-to-downstream pattern
|
|
159
|
+
body_pipeline : str | None
|
|
160
|
+
Path to external pipeline YAML file
|
|
161
|
+
condition : str | None
|
|
162
|
+
Condition expression for while, if-else, or switch branches
|
|
163
|
+
items : str | None
|
|
164
|
+
Expression resolving to iterable for for-each mode
|
|
165
|
+
item_var : str
|
|
166
|
+
Variable name for current item (default: "item")
|
|
167
|
+
index_var : str
|
|
168
|
+
Variable name for current index (default: "index")
|
|
169
|
+
count : int | None
|
|
170
|
+
Number of iterations for times mode
|
|
171
|
+
branches : list[dict] | None
|
|
172
|
+
List of condition branches for switch mode
|
|
173
|
+
else_body : str | list[dict] | None
|
|
174
|
+
Body for else branch (if-else, switch with inline execution)
|
|
175
|
+
else_action : str | None
|
|
176
|
+
Action label for else branch (switch routing mode)
|
|
177
|
+
initial_state : dict | None
|
|
178
|
+
Initial state dict for while mode
|
|
179
|
+
state_update : dict[str, str] | None
|
|
180
|
+
State update expressions for while mode
|
|
181
|
+
max_iterations : int
|
|
182
|
+
Safety limit for while loops (default: 100)
|
|
183
|
+
concurrency : int
|
|
184
|
+
Max concurrent iterations for for-each/times (default: 1)
|
|
185
|
+
collect : CollectMode
|
|
186
|
+
Result collection mode (default: "list")
|
|
187
|
+
key_field : str | None
|
|
188
|
+
Field to use as key for dict collection
|
|
189
|
+
reducer : str | None
|
|
190
|
+
Module path to reducer function for reduce collection
|
|
191
|
+
error_handling : ErrorHandling
|
|
192
|
+
Error handling strategy (default: "fail_fast")
|
|
193
|
+
deps : list[str] | None
|
|
194
|
+
Dependency node names
|
|
195
|
+
input_mapping : dict[str, str] | None
|
|
196
|
+
Field extraction mapping for orchestrator
|
|
197
|
+
**kwargs : Any
|
|
198
|
+
Additional parameters passed to NodeSpec
|
|
199
|
+
|
|
200
|
+
Returns
|
|
201
|
+
-------
|
|
202
|
+
NodeSpec
|
|
203
|
+
Configured node specification ready for execution
|
|
204
|
+
"""
|
|
205
|
+
# Store input_mapping in params for orchestrator
|
|
206
|
+
if input_mapping is not None:
|
|
207
|
+
kwargs["input_mapping"] = input_mapping
|
|
208
|
+
|
|
209
|
+
# Validate mode-specific requirements
|
|
210
|
+
self._validate_mode_params(
|
|
211
|
+
mode=mode,
|
|
212
|
+
condition=condition,
|
|
213
|
+
items=items,
|
|
214
|
+
count=count,
|
|
215
|
+
branches=branches,
|
|
216
|
+
body=body,
|
|
217
|
+
body_pipeline=body_pipeline,
|
|
218
|
+
)
|
|
219
|
+
|
|
220
|
+
# Compile conditions
|
|
221
|
+
compiled_condition = compile_expression(condition) if condition else None
|
|
222
|
+
compiled_branches = self._compile_branches(branches) if branches else None
|
|
223
|
+
compiled_state_update = (
|
|
224
|
+
{k: compile_expression(v) for k, v in state_update.items()} if state_update else None
|
|
225
|
+
)
|
|
226
|
+
|
|
227
|
+
# Capture for closure
|
|
228
|
+
_mode = mode
|
|
229
|
+
_body = body
|
|
230
|
+
_body_pipeline = body_pipeline
|
|
231
|
+
_condition = compiled_condition
|
|
232
|
+
_items = items
|
|
233
|
+
_item_var = item_var
|
|
234
|
+
_index_var = index_var
|
|
235
|
+
_count = count
|
|
236
|
+
_branches = compiled_branches
|
|
237
|
+
_else_body = else_body
|
|
238
|
+
_else_action = else_action
|
|
239
|
+
_initial_state = initial_state or {}
|
|
240
|
+
_state_update = compiled_state_update
|
|
241
|
+
_max_iterations = max_iterations
|
|
242
|
+
_concurrency = concurrency
|
|
243
|
+
_collect = collect
|
|
244
|
+
_key_field = key_field
|
|
245
|
+
_reducer = reducer
|
|
246
|
+
_error_handling = error_handling
|
|
247
|
+
_base_path = self.base_path
|
|
248
|
+
_max_concurrent_nodes = max_concurrent_nodes
|
|
249
|
+
_strict_validation = strict_validation
|
|
250
|
+
_default_node_timeout = default_node_timeout
|
|
251
|
+
|
|
252
|
+
async def composite_fn(input_data: Any, **ports: Any) -> dict[str, Any]:
|
|
253
|
+
"""Execute control flow logic based on mode."""
|
|
254
|
+
node_logger = logger.bind(node=name, node_type="composite_node", mode=_mode)
|
|
255
|
+
start_time = time.perf_counter()
|
|
256
|
+
|
|
257
|
+
# Normalize input
|
|
258
|
+
if hasattr(input_data, "model_dump"):
|
|
259
|
+
data = input_data.model_dump()
|
|
260
|
+
elif isinstance(input_data, dict):
|
|
261
|
+
data = dict(input_data)
|
|
262
|
+
else:
|
|
263
|
+
data = {"input": input_data}
|
|
264
|
+
|
|
265
|
+
# Create execution context
|
|
266
|
+
context = NodeExecutionContext(dag_id=name, node_id=name)
|
|
267
|
+
|
|
268
|
+
# Create body executor with orchestrator configuration
|
|
269
|
+
executor = BodyExecutor(
|
|
270
|
+
base_path=_base_path,
|
|
271
|
+
max_concurrent_nodes=_max_concurrent_nodes,
|
|
272
|
+
strict_validation=_strict_validation,
|
|
273
|
+
default_node_timeout=_default_node_timeout,
|
|
274
|
+
)
|
|
275
|
+
|
|
276
|
+
# Check if we have a body to execute (inline mode) or yield to downstream
|
|
277
|
+
has_body = _body is not None or _body_pipeline is not None
|
|
278
|
+
|
|
279
|
+
node_logger.info(
|
|
280
|
+
"Starting control flow",
|
|
281
|
+
has_body=has_body,
|
|
282
|
+
collect=_collect,
|
|
283
|
+
)
|
|
284
|
+
|
|
285
|
+
# Dispatch to mode handler
|
|
286
|
+
match _mode:
|
|
287
|
+
case "while":
|
|
288
|
+
result = await _execute_while(
|
|
289
|
+
data=data,
|
|
290
|
+
condition=_condition,
|
|
291
|
+
initial_state=_initial_state,
|
|
292
|
+
state_update=_state_update,
|
|
293
|
+
max_iterations=_max_iterations,
|
|
294
|
+
body=_body,
|
|
295
|
+
body_pipeline=_body_pipeline,
|
|
296
|
+
executor=executor,
|
|
297
|
+
context=context,
|
|
298
|
+
ports=ports,
|
|
299
|
+
collect=_collect,
|
|
300
|
+
key_field=_key_field,
|
|
301
|
+
error_handling=_error_handling,
|
|
302
|
+
node_logger=node_logger,
|
|
303
|
+
)
|
|
304
|
+
case "for-each":
|
|
305
|
+
result = await _execute_foreach(
|
|
306
|
+
data=data,
|
|
307
|
+
items_expr=_items,
|
|
308
|
+
item_var=_item_var,
|
|
309
|
+
index_var=_index_var,
|
|
310
|
+
concurrency=_concurrency,
|
|
311
|
+
body=_body,
|
|
312
|
+
body_pipeline=_body_pipeline,
|
|
313
|
+
executor=executor,
|
|
314
|
+
context=context,
|
|
315
|
+
ports=ports,
|
|
316
|
+
collect=_collect,
|
|
317
|
+
key_field=_key_field,
|
|
318
|
+
error_handling=_error_handling,
|
|
319
|
+
node_logger=node_logger,
|
|
320
|
+
)
|
|
321
|
+
case "times":
|
|
322
|
+
result = await _execute_times(
|
|
323
|
+
data=data,
|
|
324
|
+
count=_count or 0,
|
|
325
|
+
index_var=_index_var,
|
|
326
|
+
concurrency=_concurrency,
|
|
327
|
+
body=_body,
|
|
328
|
+
body_pipeline=_body_pipeline,
|
|
329
|
+
executor=executor,
|
|
330
|
+
context=context,
|
|
331
|
+
ports=ports,
|
|
332
|
+
collect=_collect,
|
|
333
|
+
key_field=_key_field,
|
|
334
|
+
error_handling=_error_handling,
|
|
335
|
+
node_logger=node_logger,
|
|
336
|
+
)
|
|
337
|
+
case "if-else":
|
|
338
|
+
result = await _execute_if_else(
|
|
339
|
+
data=data,
|
|
340
|
+
condition=_condition,
|
|
341
|
+
body=_body,
|
|
342
|
+
body_pipeline=_body_pipeline,
|
|
343
|
+
else_body=_else_body,
|
|
344
|
+
executor=executor,
|
|
345
|
+
context=context,
|
|
346
|
+
ports=ports,
|
|
347
|
+
node_logger=node_logger,
|
|
348
|
+
)
|
|
349
|
+
case "switch":
|
|
350
|
+
result = await _execute_switch(
|
|
351
|
+
data=data,
|
|
352
|
+
branches=_branches,
|
|
353
|
+
else_body=_else_body,
|
|
354
|
+
else_action=_else_action,
|
|
355
|
+
executor=executor,
|
|
356
|
+
context=context,
|
|
357
|
+
ports=ports,
|
|
358
|
+
node_logger=node_logger,
|
|
359
|
+
)
|
|
360
|
+
case _:
|
|
361
|
+
raise ValueError(f"Unknown mode: {_mode}")
|
|
362
|
+
|
|
363
|
+
duration_ms = (time.perf_counter() - start_time) * 1000
|
|
364
|
+
result["metadata"]["duration_ms"] = duration_ms
|
|
365
|
+
|
|
366
|
+
node_logger.info(
|
|
367
|
+
"Control flow completed",
|
|
368
|
+
duration_ms=f"{duration_ms:.2f}",
|
|
369
|
+
stopped_by=result["metadata"].get("stopped_by"),
|
|
370
|
+
)
|
|
371
|
+
|
|
372
|
+
return result
|
|
373
|
+
|
|
374
|
+
# Preserve function metadata
|
|
375
|
+
composite_fn.__name__ = f"composite_{name}"
|
|
376
|
+
composite_fn.__doc__ = f"Composite node: {name} (mode={mode})"
|
|
377
|
+
|
|
378
|
+
# Extract framework-level parameters
|
|
379
|
+
framework = self.extract_framework_params(kwargs)
|
|
380
|
+
|
|
381
|
+
return NodeSpec(
|
|
382
|
+
name=name,
|
|
383
|
+
fn=composite_fn,
|
|
384
|
+
in_model=None,
|
|
385
|
+
out_model=None,
|
|
386
|
+
deps=frozenset(deps or []),
|
|
387
|
+
params=kwargs,
|
|
388
|
+
timeout=framework["timeout"],
|
|
389
|
+
max_retries=framework["max_retries"],
|
|
390
|
+
when=framework["when"],
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
def _validate_mode_params(
|
|
394
|
+
self,
|
|
395
|
+
mode: Mode,
|
|
396
|
+
condition: str | None,
|
|
397
|
+
items: str | None,
|
|
398
|
+
count: int | None,
|
|
399
|
+
branches: list[dict[str, Any]] | None,
|
|
400
|
+
body: Any,
|
|
401
|
+
body_pipeline: str | None,
|
|
402
|
+
) -> None:
|
|
403
|
+
"""Validate mode-specific parameter requirements."""
|
|
404
|
+
match mode:
|
|
405
|
+
case "while":
|
|
406
|
+
if not condition:
|
|
407
|
+
raise ValueError("while mode requires 'condition' parameter")
|
|
408
|
+
case "for-each":
|
|
409
|
+
if not items:
|
|
410
|
+
raise ValueError("for-each mode requires 'items' parameter")
|
|
411
|
+
case "times":
|
|
412
|
+
if count is None or count < 0:
|
|
413
|
+
raise ValueError("times mode requires positive 'count' parameter")
|
|
414
|
+
case "if-else":
|
|
415
|
+
if not condition:
|
|
416
|
+
raise ValueError("if-else mode requires 'condition' parameter")
|
|
417
|
+
case "switch":
|
|
418
|
+
if not branches:
|
|
419
|
+
raise ValueError("switch mode requires 'branches' parameter")
|
|
420
|
+
case _:
|
|
421
|
+
raise ValueError(f"Unknown mode: {mode}")
|
|
422
|
+
|
|
423
|
+
# Validate body/body_pipeline mutual exclusivity
|
|
424
|
+
if body is not None and body_pipeline is not None:
|
|
425
|
+
raise ValueError("Cannot specify both 'body' and 'body_pipeline'")
|
|
426
|
+
|
|
427
|
+
def _compile_branches(self, branches: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|
428
|
+
"""Compile branch conditions."""
|
|
429
|
+
compiled = []
|
|
430
|
+
for branch in branches:
|
|
431
|
+
condition = branch.get("condition")
|
|
432
|
+
if not condition:
|
|
433
|
+
raise ValueError("Each branch must have a 'condition' field")
|
|
434
|
+
|
|
435
|
+
compiled_branch = {
|
|
436
|
+
"condition_fn": compile_expression(condition),
|
|
437
|
+
"condition_str": condition,
|
|
438
|
+
"body": branch.get("body"),
|
|
439
|
+
"action": branch.get("action"),
|
|
440
|
+
}
|
|
441
|
+
compiled.append(compiled_branch)
|
|
442
|
+
return compiled
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
# =============================================================================
|
|
446
|
+
# Mode Execution Functions
|
|
447
|
+
# =============================================================================
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
async def _execute_while(
    data: dict[str, Any],
    condition: Callable[[dict[str, Any], dict[str, Any]], bool] | None,
    initial_state: dict[str, Any],
    state_update: dict[str, Callable[..., Any]] | None,
    max_iterations: int,
    body: str | list[dict[str, Any]] | Callable[..., Any] | None,
    body_pipeline: str | None,
    executor: BodyExecutor,
    context: NodeExecutionContext,
    ports: dict[str, Any],
    collect: CollectMode,
    key_field: str | None,
    error_handling: ErrorHandling,
    node_logger: Any,
) -> dict[str, Any]:
    """Execute while loop mode.

    Repeatedly evaluates ``condition(data, state)`` and, while it holds (and
    ``max_iterations`` is not exhausted), executes ``body``/``body_pipeline``
    via ``executor``.  After each body run, ``state_update`` callables may
    rewrite individual state keys; without a body, each iteration just records
    a snapshot of the loop state for downstream consumers.

    Returns a dict with ``result`` (shaped by ``collect``/``key_field`` via
    ``_collect_results``) and ``metadata`` describing iteration count, stop
    reason, final state, and per-iteration errors.
    """
    state = dict(initial_state)  # copy so the caller's initial_state is never mutated
    results: list[Any] = []
    errors: list[dict[str, Any]] = []
    iteration = 0
    stopped_by = "exhausted"
    has_body = body is not None or body_pipeline is not None

    while iteration < max_iterations:
        # Check condition (skipped entirely when no condition was supplied;
        # validation upstream is expected to require one for while mode)
        if condition:
            try:
                should_continue = condition(data, state)
            except Exception as e:
                # A broken condition stops the loop instead of failing the node.
                node_logger.warning("Condition evaluation failed", error=str(e))
                stopped_by = "condition_error"
                break

            if not should_continue:
                stopped_by = "condition"
                break

        node_logger.debug("While iteration", iteration=iteration, state_keys=list(state.keys()))

        if has_body:
            # Execute body
            iteration_context = {
                "$index": iteration,
                "$item": None,  # while mode has no per-item value
                "state": state,
                "iteration": iteration,
            }

            # Pre-bind so a failed execution still yields a defined value for
            # the state-update context below.
            result: Any = None
            try:
                result = await executor.execute(
                    body=body,
                    body_pipeline=body_pipeline,
                    input_data=data,
                    context=context,
                    ports=ports,
                    iteration_context=iteration_context,
                )
                results.append(result)
            except Exception as e:
                if error_handling == "fail_fast":
                    raise
                errors.append({"iteration": iteration, "error": str(e)})
                if error_handling == "continue":
                    # Keep results positionally aligned with iterations.
                    results.append(None)

            # Update state
            if state_update:
                for key, update_fn in state_update.items():
                    try:
                        # Build context for state update (includes body result)
                        update_context = {**data, "state": state, "$body": result}
                        state[key] = update_fn(update_context, state)
                    except Exception as e:
                        # Best-effort: a failed update leaves that key unchanged.
                        node_logger.warning(f"State update for '{key}' failed: {e}")
        else:
            # Yield to downstream - just track iteration
            results.append({
                "$index": iteration,
                "state": dict(state),  # snapshot so later mutations don't alias
                "iteration": iteration,
            })

        iteration += 1
        # Expose the counter so condition/state-update expressions can read it.
        state["iteration"] = iteration

    if iteration >= max_iterations:
        stopped_by = "limit"

    return {
        "result": _collect_results(results, collect, key_field),
        "metadata": {
            "mode": "while",
            "iterations": iteration,
            "stopped_by": stopped_by,
            "final_state": state,
            # NOTE(review): with a non-"continue" error handling that still
            # collects errors, failed iterations append no placeholder, so
            # this difference may undercount successes — confirm the set of
            # ErrorHandling values.
            "successful": len(results) - len(errors),
            "failed": len(errors),
            "errors": errors if errors else None,
        },
    }
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
async def _execute_foreach(
    data: dict[str, Any],
    items_expr: str | None,
    item_var: str,
    index_var: str,
    concurrency: int,
    body: str | list[dict[str, Any]] | Callable[..., Any] | None,
    body_pipeline: str | None,
    executor: BodyExecutor,
    context: NodeExecutionContext,
    ports: dict[str, Any],
    collect: CollectMode,
    key_field: str | None,
    error_handling: ErrorHandling,
    node_logger: Any,
) -> dict[str, Any]:
    """Execute for-each loop mode.

    Resolves ``items_expr`` against ``data`` to an iterable, then runs the
    body/body_pipeline once per item with at most ``concurrency`` iterations
    in flight.  Without a body, each item's iteration-context dict is yielded
    downstream instead.  Results are returned in item order regardless of
    completion order.

    Raises:
        ValueError: if the items expression does not resolve to an iterable.
    """
    # Resolve items
    items = evaluate_expression(items_expr, data, state={}) if items_expr else []

    if not hasattr(items, "__iter__"):
        raise ValueError(f"items expression must resolve to iterable, got {type(items)}")

    # Materialize once so total/$is_last are known even for generators.
    items_list = list(items)
    total = len(items_list)
    has_body = body is not None or body_pipeline is not None

    node_logger.info("For-each iteration", total=total, concurrency=concurrency)

    results: list[Any] = []
    errors: list[dict[str, Any]] = []

    # Use semaphore for concurrency control
    semaphore = asyncio.Semaphore(concurrency)

    async def process_item(idx: int, item: Any) -> tuple[int, Any, Exception | None]:
        # Returns (index, result, error) so failures stay attributable per item.
        async with semaphore:
            iteration_context = {
                "$item": item,
                "$index": idx,
                "$total": total,
                "$is_first": idx == 0,
                "$is_last": idx == total - 1,
                item_var: item,   # user-configurable alias for the current item
                index_var: idx,   # user-configurable alias for the index
            }

            if has_body:
                try:
                    result = await executor.execute(
                        body=body,
                        body_pipeline=body_pipeline,
                        input_data=data,
                        context=context,
                        ports=ports,
                        iteration_context=iteration_context,
                    )
                    return idx, result, None
                except Exception as e:
                    return idx, None, e
            else:
                # Yield to downstream
                return idx, iteration_context, None

    # Execute all items
    tasks = [process_item(i, item) for i, item in enumerate(items_list)]
    # gather preserves task submission order, keeping results item-aligned.
    task_results = await asyncio.gather(*tasks, return_exceptions=True)

    # Process results in order
    for task_result in task_results:
        if isinstance(task_result, BaseException):
            # Only exceptions that escaped process_item itself land here
            # (process_item already catches Exception when a body runs).
            if error_handling == "fail_fast":
                raise task_result
            errors.append({"error": str(task_result)})
            results.append(None)
        else:
            # task_result is tuple[int, Any, Exception | None]
            idx, result, error = task_result
            if error:
                if error_handling == "fail_fast":
                    raise error
                errors.append({"index": idx, "error": str(error)})
                results.append(None)
            else:
                results.append(result)

    return {
        "result": _collect_results(results, collect, key_field),
        "metadata": {
            "mode": "for-each",
            "iterations": total,
            "stopped_by": "exhausted",
            "successful": total - len(errors),
            "failed": len(errors),
            "errors": errors if errors else None,
        },
    }
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
async def _execute_times(
    data: dict[str, Any],
    count: int,
    index_var: str,
    concurrency: int,
    body: str | list[dict[str, Any]] | Callable[..., Any] | None,
    body_pipeline: str | None,
    executor: BodyExecutor,
    context: NodeExecutionContext,
    ports: dict[str, Any],
    collect: CollectMode,
    key_field: str | None,
    error_handling: ErrorHandling,
    node_logger: Any,
) -> dict[str, Any]:
    """Execute times loop mode.

    Runs the body/body_pipeline exactly ``count`` times, with at most
    ``concurrency`` iterations in flight at once.  Without a body, each
    iteration yields its iteration-context dict downstream instead.  Results
    are returned in index order regardless of completion order, shaped by
    ``collect``/``key_field`` via ``_collect_results``.
    """
    has_body = body is not None or body_pipeline is not None

    node_logger.info("Times iteration", count=count, concurrency=concurrency)

    results: list[Any] = []
    errors: list[dict[str, Any]] = []

    # Bound the number of concurrently running iterations.
    semaphore = asyncio.Semaphore(concurrency)

    async def process_iteration(idx: int) -> tuple[int, Any, Exception | None]:
        # Returns (index, result, error) so failures stay attributable per index.
        async with semaphore:
            iteration_context = {
                "$index": idx,
                "$total": count,
                "$is_first": idx == 0,
                "$is_last": idx == count - 1,
                index_var: idx,  # user-configurable alias for the index
            }

            if has_body:
                try:
                    result = await executor.execute(
                        body=body,
                        body_pipeline=body_pipeline,
                        input_data=data,
                        context=context,
                        ports=ports,
                        iteration_context=iteration_context,
                    )
                    return idx, result, None
                except Exception as e:
                    return idx, None, e
            else:
                return idx, iteration_context, None

    tasks = [process_iteration(i) for i in range(count)]
    # gather preserves task submission order, keeping results index-aligned.
    task_results = await asyncio.gather(*tasks, return_exceptions=True)

    for task_result in task_results:
        if isinstance(task_result, BaseException):
            # Only exceptions that escaped process_iteration itself land here
            # (process_iteration already catches Exception when a body runs).
            if error_handling == "fail_fast":
                raise task_result
            errors.append({"error": str(task_result)})
            results.append(None)
        else:
            # task_result is tuple[int, Any, Exception | None]
            idx, result, error = task_result
            if error:
                if error_handling == "fail_fast":
                    raise error
                errors.append({"index": idx, "error": str(error)})
                results.append(None)
            else:
                results.append(result)

    return {
        "result": _collect_results(results, collect, key_field),
        "metadata": {
            "mode": "times",
            "iterations": count,
            "stopped_by": "exhausted",
            "successful": count - len(errors),
            "failed": len(errors),
            "errors": errors if errors else None,
        },
    }
|
|
735
|
+
|
|
736
|
+
|
|
737
|
+
async def _execute_if_else(
    data: dict[str, Any],
    condition: Callable[[dict[str, Any], dict[str, Any]], bool] | None,
    body: str | list[dict[str, Any]] | Callable[..., Any] | None,
    body_pipeline: str | None,
    else_body: str | list[dict[str, Any]] | None,
    executor: BodyExecutor,
    context: NodeExecutionContext,
    ports: dict[str, Any],
    node_logger: Any,
) -> dict[str, Any]:
    """Execute if-else mode.

    Evaluates ``condition(data, {})`` (a failed or missing condition counts
    as False), then runs the matching side's body via ``executor`` when one
    is defined.  When the chosen side has no body, the decision itself is
    passed downstream as ``{"condition_met": ...}``.

    Returns a dict with ``result`` and ``metadata`` recording which branch
    was taken.
    """
    # Evaluate the condition; any failure is logged and treated as False.
    taken = False
    if condition:
        try:
            taken = bool(condition(data, {}))
        except Exception as e:
            node_logger.warning("Condition evaluation failed", error=str(e))
            taken = False

    node_logger.debug("If-else evaluation", condition_met=taken)

    run_if = taken and (body is not None or body_pipeline is not None)
    run_else = (not taken) and else_body is not None

    if run_if:
        outcome = await executor.execute(
            body=body,
            body_pipeline=body_pipeline,
            input_data=data,
            context=context,
            ports=ports,
        )
        chosen = "if"
    elif run_else:
        outcome = await executor.execute(
            body=else_body,
            body_pipeline=None,
            input_data=data,
            context=context,
            ports=ports,
        )
        chosen = "else"
    else:
        # Neither side has a body to run: pass the decision downstream.
        outcome = {"condition_met": taken}
        chosen = "if" if taken else "else"

    return {
        "result": outcome,
        "metadata": {
            "mode": "if-else",
            "condition_met": taken,
            "branch_taken": chosen,
            "stopped_by": "condition",
        },
    }
|
|
795
|
+
|
|
796
|
+
|
|
797
|
+
async def _execute_switch(
    data: dict[str, Any],
    branches: list[dict[str, Any]] | None,
    else_body: str | list[dict[str, Any]] | None,
    else_action: str | None,
    executor: BodyExecutor,
    context: NodeExecutionContext,
    ports: dict[str, Any],
    node_logger: Any,
) -> dict[str, Any]:
    """Execute switch mode.

    Evaluates branch conditions in order (branches are expected to be
    pre-compiled, carrying a callable under ``condition_fn``).  The first
    matching branch wins: if it has a ``body`` it is executed immediately and
    its output returned; otherwise its ``action`` label is returned for
    routing.  With no match, ``else_body`` (executed) or ``else_action``
    (routed) is used.
    """
    matched_branch: int | None = None
    matched_action: str | None = None
    evaluations: list[bool] = []  # per-branch condition outcomes, for metadata

    # Evaluate branches
    if branches:
        for idx, branch in enumerate(branches):
            try:
                condition_fn = branch["condition_fn"]
                is_match = bool(condition_fn(data, {}))
            except Exception as e:
                # A failing condition counts as "no match" rather than an error.
                node_logger.warning(f"Branch {idx} condition failed: {e}")
                is_match = False

            evaluations.append(is_match)

            if is_match and matched_branch is None:
                matched_branch = idx
                matched_action = branch.get("action")
                branch_body = branch.get("body")

                # Execute body if present — returns immediately, so later
                # branches are never evaluated and ``evaluations`` stops here.
                if branch_body is not None:
                    result = await executor.execute(
                        body=branch_body,
                        body_pipeline=None,
                        input_data=data,
                        context=context,
                        ports=ports,
                    )
                    return {
                        "result": result,
                        "metadata": {
                            "mode": "switch",
                            "matched_branch": matched_branch,
                            "matched_action": matched_action,
                            "evaluations": evaluations,
                            "stopped_by": "branch_matched",
                        },
                    }

    # No branch matched or branch was routing-only (action)
    if matched_branch is None:
        # Use else
        if else_body is not None:
            result = await executor.execute(
                body=else_body,
                body_pipeline=None,
                input_data=data,
                context=context,
                ports=ports,
            )
            return {
                "result": result,
                "metadata": {
                    "mode": "switch",
                    "matched_branch": None,
                    "matched_action": else_action,
                    "evaluations": evaluations,
                    "stopped_by": "else",
                },
            }
        # No else body either: fall through to routing with the else action.
        matched_action = else_action

    # Routing mode - return action label
    node_logger.info(
        "Switch routing",
        matched_branch=matched_branch,
        action=matched_action,
    )

    return {
        "result": matched_action,
        "metadata": {
            "mode": "switch",
            "matched_branch": matched_branch,
            "matched_action": matched_action,
            "evaluations": evaluations,
            "has_else": else_action is not None or else_body is not None,
            "stopped_by": "branch_matched" if matched_branch is not None else "else",
        },
    }
|
|
890
|
+
|
|
891
|
+
|
|
892
|
+
def _collect_results(
    results: list[Any],
    mode: CollectMode,
    key_field: str | None = None,
) -> Any:
    """Collect results according to mode.

    Shapes the per-iteration ``results`` list for the node's output:
    ``"list"`` returns it unchanged, ``"last"``/``"first"`` pick the
    last/first non-None entry, ``"dict"`` keys entries by ``key_field``
    (or by index when no key_field is given), and ``"reduce"`` returns the
    raw list for the caller's reducer.  An empty input yields ``[]`` for
    list mode and ``None`` otherwise.
    """
    if not results:
        return [] if mode == "list" else None

    match mode:
        case "list":
            return results
        case "last":
            # Find last non-None result
            for r in reversed(results):
                if r is not None:
                    return r
            return None
        case "first":
            # Find first non-None result
            for r in results:
                if r is not None:
                    return r
            return None
        case "dict":
            if not key_field:
                # No key field: fall back to positional keys.
                return dict(enumerate(results))
            result_dict = {}
            for r in results:
                # Entries without the key field are silently dropped.
                if isinstance(r, dict) and key_field in r:
                    result_dict[r[key_field]] = r
            return result_dict
        case "reduce":
            # For reduce, return all results (reducer should be applied by caller)
            return results