hexdag-0.5.0.dev1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (261)
  1. hexdag/__init__.py +116 -0
  2. hexdag/__main__.py +30 -0
  3. hexdag/adapters/executors/__init__.py +5 -0
  4. hexdag/adapters/executors/local_executor.py +316 -0
  5. hexdag/builtin/__init__.py +6 -0
  6. hexdag/builtin/adapters/__init__.py +51 -0
  7. hexdag/builtin/adapters/anthropic/__init__.py +5 -0
  8. hexdag/builtin/adapters/anthropic/anthropic_adapter.py +151 -0
  9. hexdag/builtin/adapters/database/__init__.py +6 -0
  10. hexdag/builtin/adapters/database/csv/csv_adapter.py +249 -0
  11. hexdag/builtin/adapters/database/pgvector/__init__.py +5 -0
  12. hexdag/builtin/adapters/database/pgvector/pgvector_adapter.py +478 -0
  13. hexdag/builtin/adapters/database/sqlalchemy/sqlalchemy_adapter.py +252 -0
  14. hexdag/builtin/adapters/database/sqlite/__init__.py +5 -0
  15. hexdag/builtin/adapters/database/sqlite/sqlite_adapter.py +410 -0
  16. hexdag/builtin/adapters/local/README.md +59 -0
  17. hexdag/builtin/adapters/local/__init__.py +7 -0
  18. hexdag/builtin/adapters/local/local_observer_manager.py +696 -0
  19. hexdag/builtin/adapters/memory/__init__.py +47 -0
  20. hexdag/builtin/adapters/memory/file_memory_adapter.py +297 -0
  21. hexdag/builtin/adapters/memory/in_memory_memory.py +216 -0
  22. hexdag/builtin/adapters/memory/schemas.py +57 -0
  23. hexdag/builtin/adapters/memory/session_memory.py +178 -0
  24. hexdag/builtin/adapters/memory/sqlite_memory_adapter.py +215 -0
  25. hexdag/builtin/adapters/memory/state_memory.py +280 -0
  26. hexdag/builtin/adapters/mock/README.md +89 -0
  27. hexdag/builtin/adapters/mock/__init__.py +15 -0
  28. hexdag/builtin/adapters/mock/hexdag.toml +50 -0
  29. hexdag/builtin/adapters/mock/mock_database.py +225 -0
  30. hexdag/builtin/adapters/mock/mock_embedding.py +223 -0
  31. hexdag/builtin/adapters/mock/mock_llm.py +177 -0
  32. hexdag/builtin/adapters/mock/mock_tool_adapter.py +192 -0
  33. hexdag/builtin/adapters/mock/mock_tool_router.py +232 -0
  34. hexdag/builtin/adapters/openai/__init__.py +5 -0
  35. hexdag/builtin/adapters/openai/openai_adapter.py +634 -0
  36. hexdag/builtin/adapters/secret/__init__.py +7 -0
  37. hexdag/builtin/adapters/secret/local_secret_adapter.py +248 -0
  38. hexdag/builtin/adapters/unified_tool_router.py +280 -0
  39. hexdag/builtin/macros/__init__.py +17 -0
  40. hexdag/builtin/macros/conversation_agent.py +390 -0
  41. hexdag/builtin/macros/llm_macro.py +151 -0
  42. hexdag/builtin/macros/reasoning_agent.py +423 -0
  43. hexdag/builtin/macros/tool_macro.py +380 -0
  44. hexdag/builtin/nodes/__init__.py +38 -0
  45. hexdag/builtin/nodes/_discovery.py +123 -0
  46. hexdag/builtin/nodes/agent_node.py +696 -0
  47. hexdag/builtin/nodes/base_node_factory.py +242 -0
  48. hexdag/builtin/nodes/composite_node.py +926 -0
  49. hexdag/builtin/nodes/data_node.py +201 -0
  50. hexdag/builtin/nodes/expression_node.py +487 -0
  51. hexdag/builtin/nodes/function_node.py +454 -0
  52. hexdag/builtin/nodes/llm_node.py +491 -0
  53. hexdag/builtin/nodes/loop_node.py +920 -0
  54. hexdag/builtin/nodes/mapped_input.py +518 -0
  55. hexdag/builtin/nodes/port_call_node.py +269 -0
  56. hexdag/builtin/nodes/tool_call_node.py +195 -0
  57. hexdag/builtin/nodes/tool_utils.py +390 -0
  58. hexdag/builtin/prompts/__init__.py +68 -0
  59. hexdag/builtin/prompts/base.py +422 -0
  60. hexdag/builtin/prompts/chat_prompts.py +303 -0
  61. hexdag/builtin/prompts/error_correction_prompts.py +320 -0
  62. hexdag/builtin/prompts/tool_prompts.py +160 -0
  63. hexdag/builtin/tools/builtin_tools.py +84 -0
  64. hexdag/builtin/tools/database_tools.py +164 -0
  65. hexdag/cli/__init__.py +17 -0
  66. hexdag/cli/__main__.py +7 -0
  67. hexdag/cli/commands/__init__.py +27 -0
  68. hexdag/cli/commands/build_cmd.py +812 -0
  69. hexdag/cli/commands/create_cmd.py +208 -0
  70. hexdag/cli/commands/docs_cmd.py +293 -0
  71. hexdag/cli/commands/generate_types_cmd.py +252 -0
  72. hexdag/cli/commands/init_cmd.py +188 -0
  73. hexdag/cli/commands/pipeline_cmd.py +494 -0
  74. hexdag/cli/commands/plugin_dev_cmd.py +529 -0
  75. hexdag/cli/commands/plugins_cmd.py +441 -0
  76. hexdag/cli/commands/studio_cmd.py +101 -0
  77. hexdag/cli/commands/validate_cmd.py +221 -0
  78. hexdag/cli/main.py +84 -0
  79. hexdag/core/__init__.py +83 -0
  80. hexdag/core/config/__init__.py +20 -0
  81. hexdag/core/config/loader.py +479 -0
  82. hexdag/core/config/models.py +150 -0
  83. hexdag/core/configurable.py +294 -0
  84. hexdag/core/context/__init__.py +37 -0
  85. hexdag/core/context/execution_context.py +378 -0
  86. hexdag/core/docs/__init__.py +26 -0
  87. hexdag/core/docs/extractors.py +678 -0
  88. hexdag/core/docs/generators.py +890 -0
  89. hexdag/core/docs/models.py +120 -0
  90. hexdag/core/domain/__init__.py +10 -0
  91. hexdag/core/domain/dag.py +1225 -0
  92. hexdag/core/exceptions.py +234 -0
  93. hexdag/core/expression_parser.py +569 -0
  94. hexdag/core/logging.py +449 -0
  95. hexdag/core/models/__init__.py +17 -0
  96. hexdag/core/models/base.py +138 -0
  97. hexdag/core/orchestration/__init__.py +46 -0
  98. hexdag/core/orchestration/body_executor.py +481 -0
  99. hexdag/core/orchestration/components/__init__.py +97 -0
  100. hexdag/core/orchestration/components/adapter_lifecycle_manager.py +113 -0
  101. hexdag/core/orchestration/components/checkpoint_manager.py +134 -0
  102. hexdag/core/orchestration/components/execution_coordinator.py +360 -0
  103. hexdag/core/orchestration/components/health_check_manager.py +176 -0
  104. hexdag/core/orchestration/components/input_mapper.py +143 -0
  105. hexdag/core/orchestration/components/lifecycle_manager.py +583 -0
  106. hexdag/core/orchestration/components/node_executor.py +377 -0
  107. hexdag/core/orchestration/components/secret_manager.py +202 -0
  108. hexdag/core/orchestration/components/wave_executor.py +158 -0
  109. hexdag/core/orchestration/constants.py +17 -0
  110. hexdag/core/orchestration/events/README.md +312 -0
  111. hexdag/core/orchestration/events/__init__.py +104 -0
  112. hexdag/core/orchestration/events/batching.py +330 -0
  113. hexdag/core/orchestration/events/decorators.py +139 -0
  114. hexdag/core/orchestration/events/events.py +573 -0
  115. hexdag/core/orchestration/events/observers/__init__.py +30 -0
  116. hexdag/core/orchestration/events/observers/core_observers.py +690 -0
  117. hexdag/core/orchestration/events/observers/models.py +111 -0
  118. hexdag/core/orchestration/events/taxonomy.py +269 -0
  119. hexdag/core/orchestration/hook_context.py +237 -0
  120. hexdag/core/orchestration/hooks.py +437 -0
  121. hexdag/core/orchestration/models.py +418 -0
  122. hexdag/core/orchestration/orchestrator.py +910 -0
  123. hexdag/core/orchestration/orchestrator_factory.py +275 -0
  124. hexdag/core/orchestration/port_wrappers.py +327 -0
  125. hexdag/core/orchestration/prompt/__init__.py +32 -0
  126. hexdag/core/orchestration/prompt/template.py +332 -0
  127. hexdag/core/pipeline_builder/__init__.py +21 -0
  128. hexdag/core/pipeline_builder/component_instantiator.py +386 -0
  129. hexdag/core/pipeline_builder/include_tag.py +265 -0
  130. hexdag/core/pipeline_builder/pipeline_config.py +133 -0
  131. hexdag/core/pipeline_builder/py_tag.py +223 -0
  132. hexdag/core/pipeline_builder/tag_discovery.py +268 -0
  133. hexdag/core/pipeline_builder/yaml_builder.py +1196 -0
  134. hexdag/core/pipeline_builder/yaml_validator.py +569 -0
  135. hexdag/core/ports/__init__.py +65 -0
  136. hexdag/core/ports/api_call.py +133 -0
  137. hexdag/core/ports/database.py +489 -0
  138. hexdag/core/ports/embedding.py +215 -0
  139. hexdag/core/ports/executor.py +237 -0
  140. hexdag/core/ports/file_storage.py +117 -0
  141. hexdag/core/ports/healthcheck.py +87 -0
  142. hexdag/core/ports/llm.py +551 -0
  143. hexdag/core/ports/memory.py +70 -0
  144. hexdag/core/ports/observer_manager.py +130 -0
  145. hexdag/core/ports/secret.py +145 -0
  146. hexdag/core/ports/tool_router.py +94 -0
  147. hexdag/core/ports_builder.py +623 -0
  148. hexdag/core/protocols.py +273 -0
  149. hexdag/core/resolver.py +304 -0
  150. hexdag/core/schema/__init__.py +9 -0
  151. hexdag/core/schema/generator.py +742 -0
  152. hexdag/core/secrets.py +242 -0
  153. hexdag/core/types.py +413 -0
  154. hexdag/core/utils/async_warnings.py +206 -0
  155. hexdag/core/utils/schema_conversion.py +78 -0
  156. hexdag/core/utils/sql_validation.py +86 -0
  157. hexdag/core/validation/secure_json.py +148 -0
  158. hexdag/core/yaml_macro.py +517 -0
  159. hexdag/mcp_server.py +3120 -0
  160. hexdag/studio/__init__.py +10 -0
  161. hexdag/studio/build_ui.py +92 -0
  162. hexdag/studio/server/__init__.py +1 -0
  163. hexdag/studio/server/main.py +100 -0
  164. hexdag/studio/server/routes/__init__.py +9 -0
  165. hexdag/studio/server/routes/execute.py +208 -0
  166. hexdag/studio/server/routes/export.py +558 -0
  167. hexdag/studio/server/routes/files.py +207 -0
  168. hexdag/studio/server/routes/plugins.py +419 -0
  169. hexdag/studio/server/routes/validate.py +220 -0
  170. hexdag/studio/ui/index.html +13 -0
  171. hexdag/studio/ui/package-lock.json +2992 -0
  172. hexdag/studio/ui/package.json +31 -0
  173. hexdag/studio/ui/postcss.config.js +6 -0
  174. hexdag/studio/ui/public/hexdag.svg +5 -0
  175. hexdag/studio/ui/src/App.tsx +251 -0
  176. hexdag/studio/ui/src/components/Canvas.tsx +408 -0
  177. hexdag/studio/ui/src/components/ContextMenu.tsx +187 -0
  178. hexdag/studio/ui/src/components/FileBrowser.tsx +123 -0
  179. hexdag/studio/ui/src/components/Header.tsx +181 -0
  180. hexdag/studio/ui/src/components/HexdagNode.tsx +193 -0
  181. hexdag/studio/ui/src/components/NodeInspector.tsx +512 -0
  182. hexdag/studio/ui/src/components/NodePalette.tsx +262 -0
  183. hexdag/studio/ui/src/components/NodePortsSection.tsx +403 -0
  184. hexdag/studio/ui/src/components/PluginManager.tsx +347 -0
  185. hexdag/studio/ui/src/components/PortsEditor.tsx +481 -0
  186. hexdag/studio/ui/src/components/PythonEditor.tsx +195 -0
  187. hexdag/studio/ui/src/components/ValidationPanel.tsx +105 -0
  188. hexdag/studio/ui/src/components/YamlEditor.tsx +196 -0
  189. hexdag/studio/ui/src/components/index.ts +8 -0
  190. hexdag/studio/ui/src/index.css +92 -0
  191. hexdag/studio/ui/src/main.tsx +10 -0
  192. hexdag/studio/ui/src/types/index.ts +123 -0
  193. hexdag/studio/ui/src/vite-env.d.ts +1 -0
  194. hexdag/studio/ui/tailwind.config.js +29 -0
  195. hexdag/studio/ui/tsconfig.json +37 -0
  196. hexdag/studio/ui/tsconfig.node.json +13 -0
  197. hexdag/studio/ui/vite.config.ts +35 -0
  198. hexdag/visualization/__init__.py +69 -0
  199. hexdag/visualization/dag_visualizer.py +1020 -0
  200. hexdag-0.5.0.dev1.dist-info/METADATA +369 -0
  201. hexdag-0.5.0.dev1.dist-info/RECORD +261 -0
  202. hexdag-0.5.0.dev1.dist-info/WHEEL +4 -0
  203. hexdag-0.5.0.dev1.dist-info/entry_points.txt +4 -0
  204. hexdag-0.5.0.dev1.dist-info/licenses/LICENSE +190 -0
  205. hexdag_plugins/.gitignore +43 -0
  206. hexdag_plugins/README.md +73 -0
  207. hexdag_plugins/__init__.py +1 -0
  208. hexdag_plugins/azure/LICENSE +21 -0
  209. hexdag_plugins/azure/README.md +414 -0
  210. hexdag_plugins/azure/__init__.py +21 -0
  211. hexdag_plugins/azure/azure_blob_adapter.py +450 -0
  212. hexdag_plugins/azure/azure_cosmos_adapter.py +383 -0
  213. hexdag_plugins/azure/azure_keyvault_adapter.py +314 -0
  214. hexdag_plugins/azure/azure_openai_adapter.py +415 -0
  215. hexdag_plugins/azure/pyproject.toml +107 -0
  216. hexdag_plugins/azure/tests/__init__.py +1 -0
  217. hexdag_plugins/azure/tests/test_azure_blob_adapter.py +350 -0
  218. hexdag_plugins/azure/tests/test_azure_cosmos_adapter.py +323 -0
  219. hexdag_plugins/azure/tests/test_azure_keyvault_adapter.py +330 -0
  220. hexdag_plugins/azure/tests/test_azure_openai_adapter.py +329 -0
  221. hexdag_plugins/hexdag_etl/README.md +168 -0
  222. hexdag_plugins/hexdag_etl/__init__.py +53 -0
  223. hexdag_plugins/hexdag_etl/examples/01_simple_pandas_transform.py +270 -0
  224. hexdag_plugins/hexdag_etl/examples/02_simple_pandas_only.py +149 -0
  225. hexdag_plugins/hexdag_etl/examples/03_file_io_pipeline.py +109 -0
  226. hexdag_plugins/hexdag_etl/examples/test_pandas_transform.py +84 -0
  227. hexdag_plugins/hexdag_etl/hexdag.toml +25 -0
  228. hexdag_plugins/hexdag_etl/hexdag_etl/__init__.py +48 -0
  229. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/__init__.py +13 -0
  230. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/api_extract.py +230 -0
  231. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/base_node_factory.py +181 -0
  232. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/file_io.py +415 -0
  233. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/outlook.py +492 -0
  234. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/pandas_transform.py +563 -0
  235. hexdag_plugins/hexdag_etl/hexdag_etl/nodes/sql_extract_load.py +112 -0
  236. hexdag_plugins/hexdag_etl/pyproject.toml +82 -0
  237. hexdag_plugins/hexdag_etl/test_transform.py +54 -0
  238. hexdag_plugins/hexdag_etl/tests/test_plugin_integration.py +62 -0
  239. hexdag_plugins/mysql_adapter/LICENSE +21 -0
  240. hexdag_plugins/mysql_adapter/README.md +224 -0
  241. hexdag_plugins/mysql_adapter/__init__.py +6 -0
  242. hexdag_plugins/mysql_adapter/mysql_adapter.py +408 -0
  243. hexdag_plugins/mysql_adapter/pyproject.toml +93 -0
  244. hexdag_plugins/mysql_adapter/tests/test_mysql_adapter.py +259 -0
  245. hexdag_plugins/storage/README.md +184 -0
  246. hexdag_plugins/storage/__init__.py +19 -0
  247. hexdag_plugins/storage/file/__init__.py +5 -0
  248. hexdag_plugins/storage/file/local.py +325 -0
  249. hexdag_plugins/storage/ports/__init__.py +5 -0
  250. hexdag_plugins/storage/ports/vector_store.py +236 -0
  251. hexdag_plugins/storage/sql/__init__.py +7 -0
  252. hexdag_plugins/storage/sql/base.py +187 -0
  253. hexdag_plugins/storage/sql/mysql.py +27 -0
  254. hexdag_plugins/storage/sql/postgresql.py +27 -0
  255. hexdag_plugins/storage/tests/__init__.py +1 -0
  256. hexdag_plugins/storage/tests/test_local_file_storage.py +161 -0
  257. hexdag_plugins/storage/tests/test_sql_adapters.py +212 -0
  258. hexdag_plugins/storage/vector/__init__.py +7 -0
  259. hexdag_plugins/storage/vector/chromadb.py +223 -0
  260. hexdag_plugins/storage/vector/in_memory.py +285 -0
  261. hexdag_plugins/storage/vector/pgvector.py +502 -0
hexdag/visualization/dag_visualizer.py
@@ -0,0 +1,1020 @@
1
+ """DAG visualization using Graphviz for hexdag pipelines.
2
+
3
+ This module provides utilities to export DirectedGraph objects to Graphviz DOT format for
4
+ visualization and debugging purposes.
5
+ """
6
+
7
+ import contextlib
8
+ import pathlib
9
+ import platform
10
+ import shutil
11
+ import subprocess # nosec B404
12
+ import tempfile
13
+ import threading
14
+ import time
15
+ from pathlib import Path
16
+ from typing import Any
17
+
18
+ try:
19
+ import graphviz
20
+ except ImportError as e:
21
+ raise ImportError(
22
+ "Graphviz is not installed. Please install with:\n"
23
+ " pip install hexdag[viz]\n"
24
+ " or\n"
25
+ " uv pip install hexdag[viz]"
26
+ ) from e
27
+
28
+ from hexdag.core.domain.dag import DirectedGraph
29
+ from hexdag.core.logging import get_logger
30
+
31
+ logger = get_logger(__name__)
32
+
33
+
34
+ class DAGVisualizer:
35
+ """Visualizes DirectedGraph objects using Graphviz."""
36
+
37
+ def __init__(self, graph: DirectedGraph):
38
+ """Initialize visualizer with a DAG.
39
+
40
+ Args
41
+ ----
42
+ graph: The DirectedGraph to visualize
43
+ """
44
+ self.graph = graph
45
+ self._dot = None
46
+
47
+ def to_dot(
48
+ self,
49
+ title: str = "Pipeline DAG",
50
+ node_attributes: dict[str, dict[str, Any]] | None = None,
51
+ edge_attributes: dict[tuple[str, str], dict[str, Any]] | None = None,
52
+ show_io_nodes: bool = True,
53
+ input_schema: Any = None,
54
+ output_schema: Any = None,
55
+ _enhance_with_generated_schemas: bool = True,
56
+ show_node_schemas: bool = True,
57
+ show_intermediate_input: bool = False,
58
+ show_intermediate_output: bool = False,
59
+ basic_node_types: dict[str, str] | None = None,
60
+ basic_node_schemas: dict[str, dict[str, Any]] | None = None,
61
+ ) -> str:
62
+ """Export DAG to DOT format string with enhanced schema display options.
63
+
64
+ Args
65
+ ----
66
+ title: Title for the graph
67
+ node_attributes: Optional custom attributes for nodes
68
+ edge_attributes: Optional custom attributes for edges
69
+ show_io_nodes: Whether to show input/output nodes
70
+ input_schema: Input schema information
71
+ output_schema: Output schema information
72
+ _enhance_with_generated_schemas: Whether to try loading auto-generated schema files
73
+ show_node_schemas: Whether to show input/output schemas on nodes
74
+ show_intermediate_input: Whether to show input schemas on intermediate nodes
75
+ show_intermediate_output: Whether to show output schemas on intermediate nodes
76
+ basic_node_types: Basic node type information from YAML (fallback mode)
77
+ basic_node_schemas: Basic schema information from YAML (fallback mode)
78
+
79
+ Returns
80
+ -------
81
+ DOT format string for the graph
82
+ """
83
+ dot = graphviz.Digraph(comment=title)
84
+ dot.attr(rankdir="TB", style="filled", bgcolor="white")
85
+ dot.attr("node", shape="box", style="filled,rounded", fontname="Arial")
86
+ dot.attr("edge", fontname="Arial")
87
+
88
+ compiled_schemas: dict[str, dict[str, Any]] = {}
89
+ pipeline_input_schema = input_schema
90
+
91
+ # Try to load compiled schema information first
92
+ pipeline_name = getattr(self.graph, "_pipeline_name", None)
93
+ if pipeline_name and (
94
+ show_node_schemas or show_intermediate_input or show_intermediate_output
95
+ ):
96
+ try:
97
+ compiled_schemas, found_input_schema = self._load_compiled_schemas(pipeline_name)
98
+ if found_input_schema and not pipeline_input_schema:
99
+ pipeline_input_schema = found_input_schema
100
+ except Exception:
101
+ # Compilation failed, use basic node information if available
102
+ if basic_node_types:
103
+ compiled_schemas = {}
104
+ for node_id, node_type in basic_node_types.items():
105
+ node_schema_info = (
106
+ basic_node_schemas.get(node_id, {}) if basic_node_schemas else {}
107
+ )
108
+ compiled_schemas[node_id] = {
109
+ "node_type": node_type,
110
+ "input_schema": node_schema_info.get("input_schema"),
111
+ "output_schema": node_schema_info.get("output_schema"),
112
+ }
113
+ # Auto-assign default output for LLM/Agent nodes if not explicit
114
+ if node_type in ["llm", "agent"] and not node_schema_info.get(
115
+ "output_schema"
116
+ ):
117
+ compiled_schemas[node_id]["output_schema"] = {"result": "str"}
118
+
119
+ first_nodes, last_nodes = self._find_terminal_nodes()
120
+
121
+ if show_io_nodes and first_nodes:
122
+ input_label = self._format_schema_label("🔵 PIPELINE INPUT", pipeline_input_schema)
123
+ dot.node("__INPUT__", input_label, color="lightblue", fillcolor="lightblue")
124
+ for first_node in first_nodes:
125
+ dot.edge("__INPUT__", first_node)
126
+
127
+ # Show pipeline output from final nodes
128
+ if show_io_nodes and last_nodes:
129
+ # Collect output schemas from all final nodes
130
+ pipeline_output_schemas = {}
131
+ for last_node in last_nodes:
132
+ node_schemas = compiled_schemas.get(last_node, {})
133
+ if node_schemas.get("output_schema"):
134
+ pipeline_output_schemas[last_node] = node_schemas["output_schema"]
135
+
136
+ if pipeline_output_schemas:
137
+ # If single output node, show its schema directly
138
+ if len(pipeline_output_schemas) == 1:
139
+ output_node, output_schema_data = next(iter(pipeline_output_schemas.items()))
140
+ output_label = self._format_schema_label(
141
+ f"🟢 PIPELINE OUTPUT\\n({output_node})", output_schema_data
142
+ )
143
+ else:
144
+ # Multiple output nodes - show combined
145
+ combined_output = {}
146
+ for node, schema in pipeline_output_schemas.items():
147
+ combined_output[f"{node}_output"] = schema
148
+ output_label = self._format_schema_label("🟢 PIPELINE OUTPUT", combined_output)
149
+ elif output_schema:
150
+ # Fallback to provided output schema
151
+ output_label = self._format_schema_label("🟢 PIPELINE OUTPUT", output_schema)
152
+ else:
153
+ output_label = "🟢 PIPELINE OUTPUT"
154
+
155
+ dot.node("__OUTPUT__", output_label, color="lightgreen", fillcolor="lightgreen")
156
+ for last_node in last_nodes:
157
+ dot.edge(last_node, "__OUTPUT__")
158
+
159
+ for node_name, node_spec in self.graph.nodes.items():
160
+ # Determine if this is an intermediate node
161
+ is_first_node = node_name in first_nodes
162
+ is_last_node = node_name in last_nodes
163
+ is_intermediate = not (is_first_node and is_last_node)
164
+
165
+ node_schemas = compiled_schemas.get(node_name, {})
166
+
167
+ # Decide what schemas to show based on options and availability
168
+ # Only show schemas when explicitly requested, not by default
169
+ has_compiled_schemas = bool(
170
+ node_schemas.get("input_schema") or node_schemas.get("output_schema")
171
+ )
172
+
173
+ if has_compiled_schemas:
174
+ # Only show schemas when explicitly requested
175
+ show_input_for_node = show_intermediate_input and is_intermediate
176
+ show_output_for_node = show_intermediate_output and is_intermediate
177
+ else:
178
+ # Only show for intermediate nodes when explicitly requested (fallback mode)
179
+ show_input_for_node = show_intermediate_input and is_intermediate
180
+ show_output_for_node = show_intermediate_output and is_intermediate
181
+
182
+ if (show_node_schemas or show_input_for_node or show_output_for_node) and node_schemas:
183
+ input_schema_to_show = (
184
+ node_schemas.get("input_schema") if show_input_for_node else None
185
+ )
186
+ output_schema_to_show = (
187
+ node_schemas.get("output_schema") if show_output_for_node else None
188
+ )
189
+
190
+ label = self._create_enhanced_node_label(
191
+ node_name,
192
+ node_spec,
193
+ input_schema_to_show,
194
+ output_schema_to_show,
195
+ node_schemas.get("type"),
196
+ node_schemas.get("function_name"),
197
+ )
198
+ elif node_schemas and node_schemas.get("type"):
199
+ # Show node type even without schemas when compiled data is available
200
+ detected_node_type: str | None = node_schemas.get("type")
201
+ function_name: str | None = node_schemas.get("function_name")
202
+
203
+ if function_name:
204
+ label = f"📦 {node_name}\\n({detected_node_type}: {function_name})"
205
+ else:
206
+ label = f"📦 {node_name}\\n({detected_node_type})"
207
+ else:
208
+ # Fallback to basic node label with node type if available
209
+ basic_node_type: str | None = (
210
+ basic_node_types.get(node_name) if basic_node_types else None
211
+ )
212
+ if basic_node_type:
213
+ label = f"{node_name}\\n({basic_node_type})"
214
+ else:
215
+ label = self._format_node_label(node_name, node_spec)
216
+
217
+ # Apply custom attributes if provided
218
+ node_attrs = node_attributes.get(node_name, {}) if node_attributes else {}
219
+
220
+ styling_node_type: str | None = node_schemas.get("type") or (
221
+ basic_node_types.get(node_name) if basic_node_types else None
222
+ )
223
+ default_attrs = self._get_node_style(node_spec, styling_node_type)
224
+ default_attrs.update(node_attrs)
225
+
226
+ dot.node(node_name, label, **default_attrs)
227
+
228
+ for node_name, node_spec in self.graph.nodes.items():
229
+ for dep in node_spec.deps:
230
+ # Apply custom edge attributes if provided
231
+ edge_key = (dep, node_name)
232
+ edge_attrs = edge_attributes.get(edge_key, {}) if edge_attributes else {}
233
+ dot.edge(dep, node_name, **edge_attrs)
234
+
235
+ return dot.source # type: ignore[no-any-return]
236
+
237
+ def _extract_compiled_schemas(
238
+ self, node_configs: list[dict[str, Any]]
239
+ ) -> dict[str, dict[str, Any]]:
240
+ """Extract schema information from compiled NODE_CONFIGS.
241
+
242
+ Args
243
+ ----
244
+ node_configs: List of compiled node configurations
245
+
246
+ Returns
247
+ -------
248
+ Dictionary mapping node_id to {input_schema, output_schema, type, function_name}
249
+ """
250
+ schemas = {}
251
+
252
+ for node_config in node_configs:
253
+ node_id = node_config.get("id")
254
+ if not node_id:
255
+ continue
256
+
257
+ params = node_config.get("params", {})
258
+ schemas[node_id] = {
259
+ "input_schema": params.get("input_schema"),
260
+ "output_schema": params.get("output_schema"),
261
+ "type": node_config.get("type"),
262
+ "function_name": (
263
+ params.get("fn") if node_config.get("type") == "function" else None
264
+ ),
265
+ }
266
+
267
+ return schemas
268
+
269
+ def _extract_node_input_schema(self, node_spec: Any) -> dict[str, str] | None:
270
+ """Extract input schema information from a node specification.
271
+
272
+ Args
273
+ ----
274
+ node_spec: Node specification object
275
+
276
+ Returns
277
+ -------
278
+ Dictionary of input schema fields or None
279
+ """
280
+ if hasattr(node_spec, "in_model") and node_spec.in_model:
281
+ return self._convert_type_to_schema_dict(node_spec.in_model)
282
+
283
+ # Check for function-specific schema info
284
+ if hasattr(node_spec, "fn") and hasattr(node_spec.fn, "__annotations__"):
285
+ return self._extract_function_input_schema(node_spec.fn)
286
+
287
+ return None
288
+
289
+ def _extract_node_output_schema(self, node_spec: Any) -> dict[str, str] | None:
290
+ """Extract output schema information from a node specification.
291
+
292
+ Args
293
+ ----
294
+ node_spec: Node specification object
295
+
296
+ Returns
297
+ -------
298
+ Dictionary of output schema fields or None
299
+ """
300
+ if hasattr(node_spec, "out_model") and node_spec.out_model:
301
+ return self._convert_type_to_schema_dict(node_spec.out_model)
302
+
303
+ # Check for function-specific schema info
304
+ if hasattr(node_spec, "fn") and hasattr(node_spec.fn, "__annotations__"):
305
+ return self._extract_function_output_schema(node_spec.fn)
306
+
307
+ return None
308
+
309
+ def _convert_type_to_schema_dict(self, type_obj: Any) -> dict[str, str] | None:
310
+ """Convert a type object to a schema dictionary.
311
+
312
+ Args
313
+ ----
314
+ type_obj: Type object to convert
315
+
316
+ Returns
317
+ -------
318
+ Dictionary representation of the type
319
+ """
320
+ try:
321
+ if hasattr(type_obj, "model_fields"):
322
+ schema = {}
323
+ for field_name, field_info in type_obj.model_fields.items():
324
+ field_type = getattr(
325
+ field_info.annotation, "__name__", str(field_info.annotation)
326
+ )
327
+ schema[field_name] = field_type
328
+ return schema
329
+
330
+ if hasattr(type_obj, "__annotations__"):
331
+ schema = {}
332
+ for field_name, field_type in type_obj.__annotations__.items():
333
+ type_name = getattr(field_type, "__name__", str(field_type))
334
+ schema[field_name] = type_name
335
+ return schema
336
+
337
+ if isinstance(type_obj, dict):
338
+ return type_obj
339
+
340
+ except Exception:
341
+ # Type conversion failed - this is expected for complex types
342
+ pass # nosec B110 - intentional silent failure for type conversion
343
+ return None
344
+
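For reference, a minimal sketch of the three input shapes this conversion accepts and the field dictionaries it should yield; the model and field names below are illustrative, not part of the package:

    from typing import TypedDict
    from pydantic import BaseModel

    class QueryInput(BaseModel):    # Pydantic model class: fields read via model_fields
        text: str
        top_k: int = 5

    class RawRow(TypedDict):        # plain annotated class: fields read via __annotations__
        id: int
        payload: str

    plain_dict = {"result": "str"}  # dict schemas pass through unchanged

    # Expected conversions, roughly:
    #   QueryInput -> {"text": "str", "top_k": "int"}
    #   RawRow     -> {"id": "int", "payload": "str"}
    #   plain_dict -> {"result": "str"}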
345
+ def _extract_function_input_schema(self, func: Any) -> dict[str, str] | None:
346
+ """Extract input schema from function type hints.
347
+
348
+ Args
349
+ ----
350
+ func: Function to analyze
351
+
352
+ Returns
353
+ -------
354
+ Dictionary of input schema fields or None
355
+ """
356
+ try:
357
+ import inspect
358
+ from typing import get_type_hints
359
+
360
+ hints = get_type_hints(func)
361
+ sig = inspect.signature(func)
362
+ params = list(sig.parameters.values())
363
+
364
+ if params and params[0].name != "self":
365
+ first_param = params[0]
366
+ param_type = hints.get(first_param.name)
367
+ return self._convert_type_to_schema_dict(param_type)
368
+
369
+ except Exception:
370
+ # Function signature analysis failed - this is expected for functions without type hints
371
+ pass # nosec B110 - intentional silent failure for function analysis
372
+ return None
373
+
374
+ def _extract_function_output_schema(self, func: Any) -> dict[str, str] | None:
375
+ """Extract output schema from function return type hints.
376
+
377
+ Args
378
+ ----
379
+ func: Function to analyze
380
+
381
+ Returns
382
+ -------
383
+ Dictionary of output schema fields or None
384
+ """
385
+ try:
386
+ from typing import get_type_hints
387
+
388
+ hints = get_type_hints(func)
389
+ return_type = hints.get("return")
390
+
391
+ if return_type and return_type is not type(None):
392
+ return self._convert_type_to_schema_dict(return_type)
393
+
394
+ except Exception:
395
+ pass # nosec B110 - intentional silent failure for return type analysis
396
+ return None
397
+
398
+ def _create_enhanced_node_label(
399
+ self,
400
+ node_name: str,
401
+ node_spec: Any,
402
+ input_schema: dict[str, str] | None,
403
+ output_schema: dict[str, str] | None,
404
+ node_type: str | None = None,
405
+ function_name: str | None = None,
406
+ ) -> str:
407
+ """Create an enhanced node label showing input/output schemas from compiled data.
408
+
409
+ Args
410
+ ----
411
+ node_name: Name of the node
412
+ node_spec: Node specification
413
+ input_schema: Input schema dictionary from compiled data
414
+ output_schema: Output schema dictionary from compiled data
415
+ node_type: Node type from compiled data
416
+ function_name: Function name from compiled data
417
+
418
+ Returns
419
+ -------
420
+ Formatted label string for Graphviz
421
+ """
422
+ detected_type = node_type or getattr(node_spec, "type", "unknown")
423
+
424
+ type_emoji = {
425
+ "function": "⚙️",
426
+ "llm": "🤖",
427
+ "agent": "🧠",
428
+ "loop": "🔄",
429
+ "conditional": "🔀",
430
+ }.get(str(detected_type) if detected_type else "unknown", "📦")
431
+
432
+ # Start with node name and type
433
+ label_parts = [f"{type_emoji} {node_name}"]
434
+
435
+ if function_name:
436
+ label_parts.append(f"({detected_type}: {function_name})")
437
+ elif detected_type:
438
+ label_parts.append(f"({detected_type})")
439
+
440
+ if input_schema and input_schema != {"result": "Any"}:
441
+ input_fields = []
442
+ for field, field_type in input_schema.items():
443
+ # Clean up type names
444
+ clean_type = (
445
+ field_type.replace("typing.", "").replace("<class '", "").replace("'>", "")
446
+ )
447
+ input_fields.append(f"{field}: {clean_type}")
448
+
449
+ if input_fields:
450
+ if len(input_fields) <= 4:
451
+ input_str = "\\n".join(input_fields)
452
+ else:
453
+ input_str = "\\n".join(input_fields[:4]) + "\\n..."
454
+ label_parts.append(f"⬇️ IN\\n{input_str}")
455
+
456
+ if output_schema and output_schema != {"result": "Any"}:
457
+ output_fields = []
458
+ for field, field_type in output_schema.items():
459
+ # Clean up type names
460
+ clean_type = (
461
+ field_type.replace("typing.", "").replace("<class '", "").replace("'>", "")
462
+ )
463
+ output_fields.append(f"{field}: {clean_type}")
464
+
465
+ if output_fields:
466
+ if len(output_fields) <= 4:
467
+ output_str = "\\n".join(output_fields)
468
+ else:
469
+ output_str = "\\n".join(output_fields[:4]) + "\\n..."
470
+ label_parts.append(f"⬆️ OUT\\n{output_str}")
471
+
472
+ return "\\n\\n".join(label_parts)
473
+
474
+ def _format_node_label(self, node_name: str, node_spec: Any) -> str:
475
+ """Format a standard node label without schemas.
476
+
477
+ Returns
478
+ -------
479
+ Formatted node label string.
480
+ """
481
+ node_type = getattr(node_spec, "type", "unknown")
482
+
483
+ if hasattr(node_spec, "fn") and hasattr(node_spec.fn, "__name__"):
484
+ return f"📦 {node_name}\\n({node_type}: {node_spec.fn.__name__})"
485
+ return f"📦 {node_name}\\n({node_type})"
486
+
487
+ def _get_node_style(
488
+ self, node_spec: Any, compiled_node_type: str | None = None
489
+ ) -> dict[str, str]:
490
+ """Get visual style for a node based on its type.
491
+
492
+ Returns
493
+ -------
494
+ Dictionary of style attributes.
495
+ """
496
+ node_type = str(compiled_node_type or getattr(node_spec, "type", "unknown"))
497
+
498
+ node_styles = {
499
+ "function": {"color": "lightgreen", "fillcolor": "lightgreen"},
500
+ "llm": {"color": "lightblue", "fillcolor": "lightblue"},
501
+ "agent": {"color": "lightcoral", "fillcolor": "lightcoral"},
502
+ "loop": {"color": "lightyellow", "fillcolor": "lightyellow"},
503
+ "conditional": {"color": "lightpink", "fillcolor": "lightpink"},
504
+ }
505
+ return node_styles.get(node_type, {"color": "lightgray", "fillcolor": "lightgray"})
506
+
507
+ def _find_io_nodes(self) -> tuple[list[str], list[str]]:
508
+ """Find first nodes (no dependencies) and last nodes (no dependents).
509
+
510
+ Returns
511
+ -------
512
+ Tuple of (first_nodes, last_nodes)
513
+ """
514
+ # Find first nodes (no dependencies)
515
+ first_nodes = []
516
+ for node_name in self.graph.nodes:
517
+ dependencies = self.graph.get_dependencies(node_name)
518
+ if not dependencies:
519
+ first_nodes.append(node_name)
520
+
521
+ # Find last nodes (no dependents)
522
+ all_dependencies: set[str] = set()
523
+ for node_name in self.graph.nodes:
524
+ all_dependencies.update(self.graph.get_dependencies(node_name))
525
+
526
+ last_nodes = [
527
+ node_name for node_name in self.graph.nodes if node_name not in all_dependencies
528
+ ]
529
+
530
+ return first_nodes, last_nodes
531
+
532
+ def _find_terminal_nodes(self) -> tuple[list[str], list[str]]:
533
+ """Find first nodes (no dependencies) and last nodes (no dependents).
534
+
535
+ Returns
536
+ -------
537
+ Tuple of (first_nodes, last_nodes)
538
+ """
539
+ # Find first nodes (no dependencies)
540
+ first_nodes = []
541
+ for node_name in self.graph.nodes:
542
+ dependencies = self.graph.get_dependencies(node_name)
543
+ if not dependencies:
544
+ first_nodes.append(node_name)
545
+
546
+ # Find last nodes (no dependents)
547
+ all_dependencies: set[str] = set()
548
+ for node_name in self.graph.nodes:
549
+ all_dependencies.update(self.graph.get_dependencies(node_name))
550
+
551
+ last_nodes = [
552
+ node_name for node_name in self.graph.nodes if node_name not in all_dependencies
553
+ ]
554
+
555
+ return first_nodes, last_nodes
556
+
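The same first/last detection can be pictured on a toy dependency map (node names are hypothetical): first nodes have no dependencies, last nodes never appear as anyone's dependency:

    deps = {"load": [], "clean": ["load"], "summarize": ["clean"], "report": ["summarize"]}
    first_nodes = [name for name, d in deps.items() if not d]
    all_deps = {dep for ds in deps.values() for dep in ds}
    last_nodes = [name for name in deps if name not in all_deps]
    assert first_nodes == ["load"] and last_nodes == ["report"]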
557
+ def _format_schema_label(self, label: str, schema: Any) -> str:
558
+ """Format a schema for display in a node label with enhanced Pydantic model support.
559
+
560
+ Args
561
+ ----
562
+ label: Base label (INPUT/OUTPUT)
563
+ schema: Schema information
564
+
565
+ Returns
566
+ -------
567
+ Formatted label string
568
+ """
569
+ if schema is None:
570
+ return label
571
+
572
+ if hasattr(schema, "__name__") and hasattr(schema, "model_fields"):
573
+ model_fields = schema.model_fields
574
+ field_lines = []
575
+
576
+ for field_name, field_info in model_fields.items():
577
+ field_type = getattr(field_info.annotation, "__name__", str(field_info.annotation))
578
+
579
+ if hasattr(field_info, "default") and field_info.default is not ...:
580
+ if field_info.default is None:
581
+ field_line = f"{field_name}: {field_type} = None"
582
+ else:
583
+ field_line = f"{field_name}: {field_type} = {field_info.default}"
584
+ else:
585
+ field_line = f"{field_name}: {field_type}"
586
+
587
+ field_lines.append(field_line)
588
+
589
+ # Format for display
590
+ if len(field_lines) <= 3:
591
+ field_str = "\\n".join(field_lines)
592
+ else:
593
+ field_str = "\\n".join(field_lines[:3]) + "\\n..."
594
+
595
+ return f"{label}\\n{schema.__name__}\\n{field_str}"
596
+
597
+ if hasattr(schema, "__name__"):
598
+ return f"{label}\\n({schema.__name__})"
599
+ if hasattr(schema, "model_fields"):
600
+ # Pydantic model instance
601
+ fields = list(schema.model_fields.keys())
602
+ field_str = ", ".join(fields) if len(fields) <= 3 else f"{', '.join(fields[:3])}..."
603
+ return f"{label}\\n({field_str})"
604
+ if isinstance(schema, dict):
605
+ # Dict schema - format as field: type pairs for input primitives
606
+ field_lines = []
607
+ for key, value in schema.items():
608
+ if isinstance(value, str):
609
+ # Input primitives format: {"field": "type"}
610
+ field_lines.append(f"{key}: {value}")
611
+ else:
612
+ # Other dict formats
613
+ field_lines.append(key)
614
+
615
+ if len(field_lines) <= 4:
616
+ field_str = "\\n".join(field_lines)
617
+ else:
618
+ field_str = "\\n".join(field_lines[:4]) + "\\n..."
619
+ return f"{label}\\n{field_str}"
620
+ if isinstance(schema, type):
621
+ # Type annotation
622
+ return f"{label}\\n({schema.__name__})"
623
+ # String representation
624
+ schema_str = str(schema)
625
+ if len(schema_str) > 30:
626
+ schema_str = schema_str[:27] + "..."
627
+ return f"{label}\\n({schema_str})"
628
+
629
+ def _try_load_generated_schemas(self, pipeline_name: str, pipeline_dir: str) -> dict[str, Any]:
630
+ """Try to load auto-generated schema file for enhanced visualization.
631
+
632
+ Returns
633
+ -------
634
+ Dictionary of loaded schemas, empty dict if loading fails.
635
+ """
636
+ try:
637
+ import importlib.util
638
+ from pathlib import Path
639
+
640
+ schema_file = Path(pipeline_dir) / f"{pipeline_name}_schemas.py"
641
+
642
+ if not schema_file.exists():
643
+ return {}
644
+
645
+ # Dynamically load the schema module
646
+ spec = importlib.util.spec_from_file_location(f"{pipeline_name}_schemas", schema_file)
647
+ if spec and spec.loader:
648
+ schema_module = importlib.util.module_from_spec(spec)
649
+ spec.loader.exec_module(schema_module)
650
+
651
+ schemas = {}
652
+ for attr_name in dir(schema_module):
653
+ attr = getattr(schema_module, attr_name)
654
+ if (
655
+ isinstance(attr, type)
656
+ and hasattr(attr, "model_fields")
657
+ and attr_name.endswith(("Input", "Output"))
658
+ ):
659
+ schemas[attr_name] = attr
660
+
661
+ return schemas
662
+
663
+ except Exception:
664
+ # Silently fail if schema loading doesn't work
665
+ pass # nosec B110 - intentional silent failure for schema loading
666
+ return {}
667
+
668
+ def _load_compiled_schemas(
669
+ self, pipeline_name: str
670
+ ) -> tuple[dict[str, dict[str, Any]], dict[str, str] | None]:
671
+ """Load schema information by compiling the pipeline on-the-fly.
672
+
673
+ Instead of reading pre-compiled files, this now compiles the pipeline
674
+ in memory to extract all type information for visualization.
675
+ Handles compilation failures gracefully.
676
+
677
+ Args
678
+ ----
679
+ pipeline_name: Name of the pipeline
680
+
681
+ Returns
682
+ -------
683
+ Tuple of (node_schemas_dict, pipeline_input_schema)
684
+ """
685
+ try:
686
+ # Note: Pipeline compiler has been removed in favor of simple caching.
687
+ # Schema information should be provided via basic_node_schemas parameter
688
+ # or extracted from runtime Config classes.
689
+ logger.debug(
690
+ "Schema visualization - compiler removed, use basic_node_schemas parameter"
691
+ )
692
+ return {}, None
693
+
694
+ except Exception as e:
695
+ # Silently fail - schemas are optional for visualization
696
+ logger.debug("Exception in schema loading: %s", e)
697
+ return {}, None
698
+
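Because compilation is no longer performed here, callers that want schema labels pass the information in themselves. A minimal sketch of the fallback inputs to_dot accepts as basic_node_types and basic_node_schemas (node and field names are illustrative):

    basic_node_types = {"fetch": "function", "summarize": "llm"}
    basic_node_schemas = {
        "fetch": {
            "input_schema": {"url": "str"},
            "output_schema": {"text": "str"},
        },
        # "summarize" has no explicit output_schema, so the fallback branch in
        # to_dot assigns {"result": "str"} because it is an llm node.
    }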
699
+ def _get_node_attributes(
700
+ self,
701
+ node_name: str,
702
+ custom_attributes: dict[str, dict[str, Any]] | None = None,
703
+ generated_schemas: dict[str, Any] | None = None,
704
+ ) -> dict[str, Any]:
705
+ """Get attributes for a node with enhanced schema information.
706
+
707
+ Returns
708
+ -------
709
+ Dictionary of node attributes.
710
+ """
711
+ node_spec = self.graph.nodes[node_name]
712
+ generated_schemas = generated_schemas or {}
713
+
714
+ # Basic attributes
715
+ attrs = {"label": node_name, "fontname": "Arial", "fontsize": "10"}
716
+
717
+ # Enhanced type information with generated schemas
718
+ if node_spec.in_model or node_spec.out_model or generated_schemas:
719
+ in_name = (
720
+ getattr(node_spec.in_model, "__name__", "Any") if node_spec.in_model else "Any"
721
+ )
722
+ out_name = (
723
+ getattr(node_spec.out_model, "__name__", "Any") if node_spec.out_model else "Any"
724
+ )
725
+
726
+ # Check for enhanced schema names from generated files
727
+ for schema_name, schema_class in generated_schemas.items():
728
+ if f"{node_name.title().replace('_', '')}Input" in schema_name:
729
+ in_name = schema_class.__name__
730
+ elif f"{node_name.title().replace('_', '')}Output" in schema_name:
731
+ out_name = schema_class.__name__
732
+
733
+ attrs["label"] = f"{node_name}\\n({in_name} → {out_name})"
734
+
735
+ # Enhanced coloring based on schema complexity
736
+ has_complex_schema = (
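A short usage sketch for to_dot, assuming a DirectedGraph named graph built elsewhere; note that node_attributes is keyed by node name while edge_attributes is keyed by (dependency, node) tuples:

    from hexdag.visualization.dag_visualizer import DAGVisualizer

    def styled_dot(graph) -> str:
        # graph: a hexdag DirectedGraph; its construction is out of scope here.
        visualizer = DAGVisualizer(graph)
        return visualizer.to_dot(
            title="Styled pipeline",
            node_attributes={"summarize": {"fillcolor": "gold"}},         # by node name
            edge_attributes={("fetch", "summarize"): {"label": "text"}},  # by (dep, node)
            show_intermediate_output=True,
        )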
737
+ any(node_name.lower() in schema_name.lower() for schema_name in generated_schemas)
738
+ if generated_schemas
739
+ else False
740
+ )
741
+
742
+ # Color based on function type
743
+ fn_name = getattr(node_spec.fn, "__name__", str(node_spec.fn))
744
+ if "llm" in fn_name.lower():
745
+ attrs["fillcolor"] = "lightblue"
746
+ attrs["style"] = "filled"
747
+ elif "agent" in fn_name.lower():
748
+ attrs["fillcolor"] = "lightgreen"
749
+ attrs["style"] = "filled"
750
+ elif "tool" in fn_name.lower():
751
+ attrs["fillcolor"] = "lightyellow"
752
+ attrs["style"] = "filled"
753
+
754
+ # Highlight nodes with generated schemas
755
+ if has_complex_schema:
756
+ attrs["style"] = attrs.get("style", "filled") + ",bold"
757
+ attrs["penwidth"] = "2"
758
+
759
+ if custom_attributes and node_name in custom_attributes:
760
+ attrs.update(custom_attributes[node_name])
761
+
762
+ return attrs
763
+
764
+ def _get_edge_attributes(
765
+ self,
766
+ edge: tuple[str, str],
767
+ custom_attributes: dict[tuple[str, str], dict[str, Any]] | None = None,
768
+ ) -> dict[str, Any]:
769
+ """Get attributes for an edge.
770
+
771
+ Returns
772
+ -------
773
+ Dictionary of edge attributes.
774
+ """
775
+ attrs = {"fontname": "Arial", "fontsize": "8"}
776
+
777
+ if custom_attributes and edge in custom_attributes:
778
+ attrs.update(custom_attributes[edge])
779
+
780
+ return attrs
781
+
782
+ def _format_attributes(self, attrs: dict[str, Any]) -> str:
783
+ """Format attributes for DOT notation.
784
+
785
+ Returns
786
+ -------
787
+ Formatted attribute string.
788
+ """
789
+ if not attrs:
790
+ return ""
791
+
792
+ attr_pairs = []
793
+ for key, value in attrs.items():
794
+ # Escape quotes in values
795
+ if isinstance(value, str):
796
+ value = value.replace('"', '\\"')
797
+ attr_pairs.append(f'{key}="{value}"')
798
+ else:
799
+ attr_pairs.append(f"{key}={value}")
800
+
801
+ return f"[{', '.join(attr_pairs)}]"
802
+
803
+ def render_to_file(
804
+ self, output_path: str, format: str = "png", title: str = "Pipeline DAG", **kwargs: Any
805
+ ) -> str:
806
+ """Render DAG to file using Graphviz.
807
+
808
+ Args
809
+ ----
810
+ output_path: Path where to save the rendered graph (without extension)
811
+ format: Output format ('png', 'svg', 'pdf', etc.)
812
+ title: Title for the graph
813
+ **kwargs: Additional arguments passed to to_dot()
814
+
815
+ Returns
816
+ -------
817
+ Path to the rendered file
818
+
819
+ Raises
820
+ ------
821
+ ImportError
822
+ If graphviz is not installed.
823
+ RuntimeError
824
+ If rendering fails.
825
+ """
826
+ dot_string = self.to_dot(title=title, **kwargs)
827
+
828
+ # Use subprocess to avoid Source.gv creation
829
+ try:
830
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".dot", delete=False) as temp_file:
831
+ temp_file.write(dot_string)
832
+ temp_dot_path = temp_file.name
833
+
834
+ # Use dot command to render
835
+ output_file = f"{output_path}.{format}"
836
+ # nosec B607, B603 - dot is a trusted system command for Graphviz
837
+ subprocess.run( # nosec B607, B603
838
+ ["dot", "-T" + format, "-o", output_file, temp_dot_path],
839
+ capture_output=True,
840
+ text=True,
841
+ check=True,
842
+ )
843
+
844
+ # Clean up temporary file
845
+ with contextlib.suppress(OSError):
846
+ pathlib.Path(temp_dot_path).unlink()
847
+
848
+ return output_file
849
+ except subprocess.CalledProcessError as e:
850
+ raise RuntimeError(f"Failed to render graph: {e.stderr}") from e
851
+ except FileNotFoundError:
852
+ raise ImportError(
853
+ "Graphviz 'dot' command not found. Please install Graphviz."
854
+ ) from None
855
+ except Exception as e:
856
+ raise RuntimeError(f"Failed to render graph: {e}") from e
857
+
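Since render_to_file shells out to the Graphviz dot binary, a caller can probe for the executable before rendering; a small sketch (the output stem is illustrative):

    import shutil

    def render_if_possible(visualizer: "DAGVisualizer", out_stem: str = "pipeline_dag") -> str | None:
        # render_to_file appends the extension itself, so pass only the stem.
        if shutil.which("dot") is None:
            return None  # Graphviz binaries missing; rendering would raise ImportError
        return visualizer.render_to_file(out_stem, format="svg")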
858
+ def show(self, title: str = "Pipeline DAG", **kwargs: Any) -> None:
859
+ """Display DAG in default viewer.
860
+
861
+ Args
862
+ ----
863
+ title: Title for the graph
864
+ **kwargs: Additional arguments passed to to_dot()
865
+
866
+ Raises
867
+ ------
868
+ RuntimeError
869
+ If showing graph fails.
870
+ """
871
+ dot_string = self.to_dot(title=title, **kwargs)
872
+
873
+ # Use subprocess to avoid Source.gv creation
874
+ try:
875
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".dot", delete=False) as temp_file:
876
+ temp_file.write(dot_string)
877
+ temp_dot_path = temp_file.name
878
+
879
+ # Use dot command to create a temporary image and open it
880
+ temp_image_path = temp_dot_path.replace(".dot", ".png")
881
+ # nosec B607, B603 - dot is a trusted system command for Graphviz
882
+ subprocess.run( # nosec B607, B603
883
+ ["dot", "-Tpng", "-o", temp_image_path, temp_dot_path],
884
+ capture_output=True,
885
+ text=True,
886
+ check=True,
887
+ )
888
+
889
+ # Open the image with the default viewer
890
+ # nosec B607, B603 - open is a trusted system command for viewing files
891
+ system_platform = platform.system()
892
+ if system_platform == "Darwin":
893
+ viewer_cmd = "open"
894
+ elif system_platform == "Linux":
895
+ viewer_cmd = "xdg-open"
896
+ else:
897
+ viewer_cmd = None
898
+
899
+ if viewer_cmd and shutil.which(viewer_cmd):
900
+ subprocess.run([viewer_cmd, temp_image_path], check=False) # nosec B607, B603
901
+ else:
902
+ help_msg = (
903
+ f"No default image viewer found for platform '{system_platform}'.\n"
904
+ f"For macOS, please ensure the 'open' command is available.\n"
905
+ f"For Linux, please ensure the 'xdg-open' command is installed.\n"
906
+ f"You can manually open the file located at: {temp_image_path}"
907
+ )
908
+ logger.error(help_msg)
909
+
910
+ def cleanup_files() -> None:
911
+ time.sleep(2) # Wait for viewer to open
912
+ try:
913
+ pathlib.Path(temp_dot_path).unlink()
914
+ pathlib.Path(temp_image_path).unlink()
915
+ except OSError:
916
+ pass
917
+
918
+ threading.Thread(target=cleanup_files, daemon=True).start()
919
+
920
+ except subprocess.CalledProcessError as e:
921
+ raise RuntimeError(f"Failed to show graph: {e.stderr}") from e
922
+ except Exception as e:
923
+ raise RuntimeError(f"Failed to show graph: {e}") from e
924
+
925
+
926
+ def export_dag_to_dot(
927
+ graph: DirectedGraph,
928
+ output_file: str | None = None,
929
+ title: str = "Pipeline DAG",
930
+ show_io_nodes: bool = True,
931
+ input_schema: Any = None,
932
+ output_schema: Any = None,
933
+ ) -> str:
934
+ """Export DAG to DOT format with I/O support.
935
+
936
+ Args
937
+ ----
938
+ graph: The DirectedGraph to export
939
+ output_file: Optional file path to save DOT content
940
+ title: Title for the graph
941
+ show_io_nodes: Whether to show input/output nodes
942
+ input_schema: Input schema information
943
+ output_schema: Output schema information
944
+
945
+ Returns
946
+ -------
947
+ DOT format string
948
+ """
949
+ visualizer = DAGVisualizer(graph)
950
+ dot_string = visualizer.to_dot(
951
+ title=title,
952
+ show_io_nodes=show_io_nodes,
953
+ input_schema=input_schema,
954
+ output_schema=output_schema,
955
+ )
956
+
957
+ if output_file:
958
+ output_path = Path(output_file)
959
+ with output_path.open("w", encoding="utf-8") as f:
960
+ f.write(dot_string)
961
+
962
+ return dot_string
963
+
964
+
965
+ def render_dag_to_image(
966
+ graph: DirectedGraph,
967
+ output_path: str,
968
+ format: str = "png",
969
+ title: str = "Pipeline DAG",
970
+ show_io_nodes: bool = True,
971
+ input_schema: Any = None,
972
+ output_schema: Any = None,
973
+ show_node_schemas: bool = True,
974
+ show_intermediate_input: bool = False,
975
+ show_intermediate_output: bool = False,
976
+ basic_node_types: dict[str, str] | None = None,
977
+ basic_node_schemas: dict[str, dict[str, Any]] | None = None,
978
+ ) -> str:
979
+ """Render DAG to image file with enhanced schema and intermediate node support.
980
+
981
+ Args
982
+ ----
983
+ graph: The DirectedGraph to render
984
+ output_path: Path where to save the rendered graph (without extension)
985
+ format: Output format ('png', 'svg', 'pdf', etc.)
986
+ title: Title for the graph
987
+ show_io_nodes: Whether to show input/output nodes
988
+ input_schema: Input schema information
989
+ output_schema: Output schema information
990
+ show_node_schemas: Whether to show schemas on nodes
991
+ show_intermediate_input: Whether to show input schemas on intermediate nodes
992
+ show_intermediate_output: Whether to show output schemas on intermediate nodes
993
+ basic_node_types: Basic node type information from YAML (fallback mode)
994
+ basic_node_schemas: Basic schema information from YAML (fallback mode)
995
+
996
+ Returns
997
+ -------
998
+ Path to the rendered file
999
+ """
1000
+ if "Pipeline:" in title and not hasattr(graph, "_pipeline_name"):
1001
+ pipeline_name = title.split("Pipeline:")[-1].strip()
1002
+ object.__setattr__(graph, "_pipeline_name", pipeline_name)
1003
+
1004
+ visualizer = DAGVisualizer(graph)
1005
+
1006
+ # Generate and render the DOT content with enhanced options
1007
+ dot_content = visualizer.to_dot(
1008
+ title=title,
1009
+ show_io_nodes=show_io_nodes,
1010
+ input_schema=input_schema,
1011
+ output_schema=output_schema,
1012
+ show_node_schemas=show_node_schemas,
1013
+ show_intermediate_input=show_intermediate_input,
1014
+ show_intermediate_output=show_intermediate_output,
1015
+ basic_node_types=basic_node_types,
1016
+ basic_node_schemas=basic_node_schemas,
1017
+ )
1018
+
1019
+ dot = graphviz.Source(dot_content)
1020
+ return str(dot.render(output_path, format=format, cleanup=True))
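Finally, a minimal end-to-end sketch of the two module-level helpers; building the DirectedGraph itself (typically via the pipeline builder) is assumed and not shown:

    from hexdag.visualization.dag_visualizer import export_dag_to_dot, render_dag_to_image

    def visualize(graph, out_stem: str = "pipeline") -> None:
        # Write the DOT text alongside the rendered image.
        dot_text = export_dag_to_dot(graph, output_file=f"{out_stem}.dot", title="Pipeline DAG")
        print(dot_text.splitlines()[0])  # first line of the generated DOT source
        render_dag_to_image(graph, out_stem, format="png", show_intermediate_output=True)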