vellum-ai 1.11.2__py3-none-any.whl → 1.13.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vellum-ai might be problematic.

Files changed (275)
  1. vellum/__init__.py +18 -0
  2. vellum/client/README.md +1 -1
  3. vellum/client/core/client_wrapper.py +2 -2
  4. vellum/client/core/force_multipart.py +4 -2
  5. vellum/client/core/http_response.py +1 -1
  6. vellum/client/core/pydantic_utilities.py +7 -4
  7. vellum/client/errors/too_many_requests_error.py +1 -2
  8. vellum/client/reference.md +677 -76
  9. vellum/client/resources/container_images/client.py +299 -0
  10. vellum/client/resources/container_images/raw_client.py +286 -0
  11. vellum/client/resources/documents/client.py +20 -10
  12. vellum/client/resources/documents/raw_client.py +20 -10
  13. vellum/client/resources/events/raw_client.py +4 -4
  14. vellum/client/resources/integration_auth_configs/client.py +2 -0
  15. vellum/client/resources/integration_auth_configs/raw_client.py +2 -0
  16. vellum/client/resources/integration_providers/client.py +28 -2
  17. vellum/client/resources/integration_providers/raw_client.py +24 -0
  18. vellum/client/resources/integrations/client.py +52 -4
  19. vellum/client/resources/integrations/raw_client.py +61 -0
  20. vellum/client/resources/workflow_deployments/client.py +156 -0
  21. vellum/client/resources/workflow_deployments/raw_client.py +334 -0
  22. vellum/client/resources/workflows/client.py +212 -8
  23. vellum/client/resources/workflows/raw_client.py +343 -6
  24. vellum/client/types/__init__.py +18 -0
  25. vellum/client/types/api_actor_type_enum.py +1 -1
  26. vellum/client/types/check_workflow_execution_status_error.py +21 -0
  27. vellum/client/types/check_workflow_execution_status_response.py +29 -0
  28. vellum/client/types/code_execution_package_request.py +21 -0
  29. vellum/client/types/composio_execute_tool_request.py +5 -0
  30. vellum/client/types/composio_tool_definition.py +1 -0
  31. vellum/client/types/container_image_build_config.py +1 -0
  32. vellum/client/types/container_image_container_image_tag.py +1 -0
  33. vellum/client/types/dataset_row_push_request.py +3 -0
  34. vellum/client/types/document_document_to_document_index.py +1 -0
  35. vellum/client/types/integration_name.py +24 -0
  36. vellum/client/types/node_execution_fulfilled_body.py +1 -0
  37. vellum/client/types/node_execution_log_body.py +24 -0
  38. vellum/client/types/node_execution_log_event.py +47 -0
  39. vellum/client/types/prompt_deployment_release_prompt_deployment.py +1 -0
  40. vellum/client/types/runner_config_request.py +24 -0
  41. vellum/client/types/severity_enum.py +5 -0
  42. vellum/client/types/slim_composio_tool_definition.py +1 -0
  43. vellum/client/types/slim_document_document_to_document_index.py +2 -0
  44. vellum/client/types/type_checker_enum.py +5 -0
  45. vellum/client/types/vellum_audio.py +5 -1
  46. vellum/client/types/vellum_audio_request.py +5 -1
  47. vellum/client/types/vellum_document.py +5 -1
  48. vellum/client/types/vellum_document_request.py +5 -1
  49. vellum/client/types/vellum_image.py +5 -1
  50. vellum/client/types/vellum_image_request.py +5 -1
  51. vellum/client/types/vellum_node_execution_event.py +2 -0
  52. vellum/client/types/vellum_variable.py +5 -0
  53. vellum/client/types/vellum_variable_extensions.py +1 -0
  54. vellum/client/types/vellum_variable_type.py +1 -0
  55. vellum/client/types/vellum_video.py +5 -1
  56. vellum/client/types/vellum_video_request.py +5 -1
  57. vellum/client/types/workflow_deployment_release_workflow_deployment.py +1 -0
  58. vellum/client/types/workflow_event.py +2 -0
  59. vellum/client/types/workflow_execution_fulfilled_body.py +1 -0
  60. vellum/client/types/workflow_result_event_output_data_array.py +1 -1
  61. vellum/client/types/workflow_result_event_output_data_chat_history.py +1 -1
  62. vellum/client/types/workflow_result_event_output_data_error.py +1 -1
  63. vellum/client/types/workflow_result_event_output_data_function_call.py +1 -1
  64. vellum/client/types/workflow_result_event_output_data_json.py +1 -1
  65. vellum/client/types/workflow_result_event_output_data_number.py +1 -1
  66. vellum/client/types/workflow_result_event_output_data_search_results.py +1 -1
  67. vellum/client/types/workflow_result_event_output_data_string.py +1 -1
  68. vellum/client/types/workflow_sandbox_execute_node_response.py +8 -0
  69. vellum/plugins/vellum_mypy.py +37 -2
  70. vellum/types/check_workflow_execution_status_error.py +3 -0
  71. vellum/types/check_workflow_execution_status_response.py +3 -0
  72. vellum/types/code_execution_package_request.py +3 -0
  73. vellum/types/node_execution_log_body.py +3 -0
  74. vellum/types/node_execution_log_event.py +3 -0
  75. vellum/types/runner_config_request.py +3 -0
  76. vellum/types/severity_enum.py +3 -0
  77. vellum/types/type_checker_enum.py +3 -0
  78. vellum/types/workflow_sandbox_execute_node_response.py +3 -0
  79. vellum/utils/files/mixin.py +26 -0
  80. vellum/utils/files/tests/test_mixin.py +62 -0
  81. vellum/utils/tests/test_vellum_client.py +95 -0
  82. vellum/utils/uuid.py +19 -2
  83. vellum/utils/vellum_client.py +10 -3
  84. vellum/workflows/__init__.py +7 -1
  85. vellum/workflows/descriptors/base.py +86 -0
  86. vellum/workflows/descriptors/tests/test_utils.py +9 -0
  87. vellum/workflows/errors/tests/__init__.py +0 -0
  88. vellum/workflows/errors/tests/test_types.py +52 -0
  89. vellum/workflows/errors/types.py +1 -0
  90. vellum/workflows/events/node.py +24 -0
  91. vellum/workflows/events/tests/test_event.py +123 -0
  92. vellum/workflows/events/types.py +2 -1
  93. vellum/workflows/events/workflow.py +28 -2
  94. vellum/workflows/expressions/add.py +3 -0
  95. vellum/workflows/expressions/tests/test_add.py +24 -0
  96. vellum/workflows/graph/graph.py +26 -5
  97. vellum/workflows/graph/tests/test_graph.py +228 -1
  98. vellum/workflows/inputs/base.py +22 -6
  99. vellum/workflows/inputs/dataset_row.py +121 -16
  100. vellum/workflows/inputs/tests/test_inputs.py +3 -3
  101. vellum/workflows/integrations/tests/test_vellum_integration_service.py +84 -0
  102. vellum/workflows/integrations/vellum_integration_service.py +12 -1
  103. vellum/workflows/loaders/base.py +2 -0
  104. vellum/workflows/nodes/bases/base.py +37 -16
  105. vellum/workflows/nodes/bases/tests/test_base_node.py +104 -1
  106. vellum/workflows/nodes/core/inline_subworkflow_node/node.py +1 -0
  107. vellum/workflows/nodes/core/inline_subworkflow_node/tests/test_node.py +1 -1
  108. vellum/workflows/nodes/core/map_node/node.py +7 -5
  109. vellum/workflows/nodes/core/map_node/tests/test_node.py +33 -0
  110. vellum/workflows/nodes/core/retry_node/node.py +1 -0
  111. vellum/workflows/nodes/core/try_node/node.py +1 -0
  112. vellum/workflows/nodes/displayable/api_node/node.py +3 -2
  113. vellum/workflows/nodes/displayable/api_node/tests/test_api_node.py +38 -0
  114. vellum/workflows/nodes/displayable/bases/api_node/node.py +1 -1
  115. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +18 -1
  116. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +109 -2
  117. vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py +13 -2
  118. vellum/workflows/nodes/displayable/code_execution_node/node.py +9 -15
  119. vellum/workflows/nodes/displayable/code_execution_node/tests/test_node.py +65 -24
  120. vellum/workflows/nodes/displayable/code_execution_node/utils.py +3 -0
  121. vellum/workflows/nodes/displayable/final_output_node/node.py +24 -69
  122. vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py +53 -3
  123. vellum/workflows/nodes/displayable/note_node/node.py +4 -1
  124. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +16 -5
  125. vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py +47 -0
  126. vellum/workflows/nodes/displayable/tool_calling_node/node.py +74 -34
  127. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +204 -8
  128. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +92 -71
  129. vellum/workflows/nodes/mocks.py +47 -213
  130. vellum/workflows/nodes/tests/test_mocks.py +0 -177
  131. vellum/workflows/nodes/utils.py +23 -8
  132. vellum/workflows/outputs/base.py +36 -3
  133. vellum/workflows/references/environment_variable.py +1 -11
  134. vellum/workflows/references/lazy.py +8 -0
  135. vellum/workflows/references/state_value.py +24 -1
  136. vellum/workflows/references/tests/test_lazy.py +58 -0
  137. vellum/workflows/references/trigger.py +8 -3
  138. vellum/workflows/references/workflow_input.py +8 -0
  139. vellum/workflows/resolvers/resolver.py +13 -3
  140. vellum/workflows/resolvers/tests/test_resolver.py +31 -0
  141. vellum/workflows/runner/runner.py +159 -14
  142. vellum/workflows/runner/tests/__init__.py +0 -0
  143. vellum/workflows/runner/tests/test_runner.py +170 -0
  144. vellum/workflows/sandbox.py +7 -8
  145. vellum/workflows/state/base.py +89 -30
  146. vellum/workflows/state/context.py +74 -3
  147. vellum/workflows/state/tests/test_state.py +269 -1
  148. vellum/workflows/tests/test_dataset_row.py +8 -7
  149. vellum/workflows/tests/test_sandbox.py +97 -8
  150. vellum/workflows/triggers/__init__.py +2 -1
  151. vellum/workflows/triggers/base.py +160 -28
  152. vellum/workflows/triggers/chat_message.py +141 -0
  153. vellum/workflows/triggers/integration.py +12 -0
  154. vellum/workflows/triggers/manual.py +3 -1
  155. vellum/workflows/triggers/schedule.py +3 -1
  156. vellum/workflows/triggers/tests/test_chat_message.py +257 -0
  157. vellum/workflows/types/core.py +18 -0
  158. vellum/workflows/types/definition.py +6 -13
  159. vellum/workflows/types/generics.py +12 -0
  160. vellum/workflows/types/tests/test_utils.py +12 -0
  161. vellum/workflows/types/utils.py +32 -2
  162. vellum/workflows/types/workflow_metadata.py +124 -0
  163. vellum/workflows/utils/functions.py +152 -16
  164. vellum/workflows/utils/pydantic_schema.py +19 -1
  165. vellum/workflows/utils/tests/test_functions.py +123 -8
  166. vellum/workflows/utils/tests/test_validate.py +79 -0
  167. vellum/workflows/utils/tests/test_vellum_variables.py +62 -2
  168. vellum/workflows/utils/uuids.py +90 -0
  169. vellum/workflows/utils/validate.py +108 -0
  170. vellum/workflows/utils/vellum_variables.py +96 -16
  171. vellum/workflows/workflows/base.py +177 -35
  172. vellum/workflows/workflows/tests/test_base_workflow.py +51 -0
  173. {vellum_ai-1.11.2.dist-info → vellum_ai-1.13.5.dist-info}/METADATA +6 -1
  174. {vellum_ai-1.11.2.dist-info → vellum_ai-1.13.5.dist-info}/RECORD +274 -227
  175. vellum_cli/__init__.py +21 -0
  176. vellum_cli/config.py +16 -2
  177. vellum_cli/pull.py +2 -0
  178. vellum_cli/push.py +23 -10
  179. vellum_cli/tests/conftest.py +8 -13
  180. vellum_cli/tests/test_image_push.py +4 -11
  181. vellum_cli/tests/test_pull.py +83 -68
  182. vellum_cli/tests/test_push.py +251 -2
  183. vellum_ee/assets/node-definitions.json +225 -12
  184. vellum_ee/scripts/generate_node_definitions.py +15 -3
  185. vellum_ee/workflows/display/base.py +4 -3
  186. vellum_ee/workflows/display/nodes/base_node_display.py +44 -11
  187. vellum_ee/workflows/display/nodes/tests/test_base_node_display.py +93 -0
  188. vellum_ee/workflows/display/nodes/types.py +1 -0
  189. vellum_ee/workflows/display/nodes/vellum/__init__.py +0 -2
  190. vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py +5 -2
  191. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +1 -1
  192. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +10 -2
  193. vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +17 -14
  194. vellum_ee/workflows/display/nodes/vellum/map_node.py +2 -0
  195. vellum_ee/workflows/display/nodes/vellum/note_node.py +18 -3
  196. vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py +37 -14
  197. vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py +62 -2
  198. vellum_ee/workflows/display/nodes/vellum/tests/test_final_output_node.py +136 -0
  199. vellum_ee/workflows/display/nodes/vellum/tests/test_note_node.py +44 -7
  200. vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py +5 -13
  201. vellum_ee/workflows/display/nodes/vellum/tests/test_subworkflow_deployment_node.py +27 -17
  202. vellum_ee/workflows/display/nodes/vellum/tests/test_tool_calling_node.py +145 -22
  203. vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +107 -2
  204. vellum_ee/workflows/display/nodes/vellum/utils.py +54 -12
  205. vellum_ee/workflows/display/tests/test_base_workflow_display.py +13 -16
  206. vellum_ee/workflows/display/tests/test_json_schema_validation.py +190 -0
  207. vellum_ee/workflows/display/tests/test_mocks.py +912 -0
  208. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_adornments_serialization.py +14 -2
  209. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_attributes_serialization.py +109 -0
  210. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_outputs_serialization.py +3 -0
  211. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_ports_serialization.py +187 -1
  212. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_api_node_serialization.py +34 -325
  213. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_code_execution_node_serialization.py +42 -393
  214. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +13 -315
  215. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_default_state_serialization.py +2 -122
  216. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_error_node_serialization.py +24 -115
  217. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_generic_node_serialization.py +4 -93
  218. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_guardrail_node_serialization.py +7 -80
  219. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_prompt_node_serialization.py +9 -101
  220. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_subworkflow_serialization.py +77 -308
  221. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py +62 -324
  222. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_merge_node_serialization.py +3 -82
  223. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_prompt_deployment_serialization.py +4 -142
  224. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_search_node_serialization.py +1 -61
  225. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_set_state_node_serialization.py +4 -4
  226. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py +205 -134
  227. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py +34 -146
  228. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py +2 -0
  229. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py +8 -6
  230. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +137 -266
  231. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_tool_wrapper_serialization.py +84 -0
  232. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_mcp_serialization.py +55 -16
  233. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +15 -1
  234. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_tool_wrapper_serialization.py +71 -0
  235. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_vellum_integration_serialization.py +119 -0
  236. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_workflow_deployment_serialization.py +1 -1
  237. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_try_node_serialization.py +0 -2
  238. vellum_ee/workflows/display/tests/workflow_serialization/test_chat_message_dict_reference_serialization.py +22 -1
  239. vellum_ee/workflows/display/tests/workflow_serialization/test_chat_message_trigger_serialization.py +412 -0
  240. vellum_ee/workflows/display/tests/workflow_serialization/test_code_tool_node_reference_error.py +106 -0
  241. vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py +9 -41
  242. vellum_ee/workflows/display/tests/workflow_serialization/test_duplicate_trigger_name_validation.py +208 -0
  243. vellum_ee/workflows/display/tests/workflow_serialization/test_final_output_node_not_referenced_by_workflow_outputs.py +45 -0
  244. vellum_ee/workflows/display/tests/workflow_serialization/test_infinite_loop_validation.py +66 -0
  245. vellum_ee/workflows/display/tests/workflow_serialization/test_int_input_serialization.py +40 -0
  246. vellum_ee/workflows/display/tests/workflow_serialization/test_integration_trigger_serialization.py +8 -14
  247. vellum_ee/workflows/display/tests/workflow_serialization/test_integration_trigger_validation.py +173 -0
  248. vellum_ee/workflows/display/tests/workflow_serialization/test_integration_trigger_with_entrypoint_node_id.py +16 -13
  249. vellum_ee/workflows/display/tests/workflow_serialization/test_list_vellum_document_serialization.py +5 -1
  250. vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py +12 -2
  251. vellum_ee/workflows/display/tests/workflow_serialization/test_multi_trigger_same_node_serialization.py +111 -0
  252. vellum_ee/workflows/display/tests/workflow_serialization/test_no_triggers_no_entrypoint_validation.py +64 -0
  253. vellum_ee/workflows/display/tests/workflow_serialization/test_partial_workflow_meta_display_override.py +55 -0
  254. vellum_ee/workflows/display/tests/workflow_serialization/test_sandbox_dataset_mocks_serialization.py +268 -0
  255. vellum_ee/workflows/display/tests/workflow_serialization/test_sandbox_invalid_pdf_data_url.py +49 -0
  256. vellum_ee/workflows/display/tests/workflow_serialization/test_sandbox_validation_errors.py +112 -0
  257. vellum_ee/workflows/display/tests/workflow_serialization/test_scheduled_trigger_serialization.py +25 -16
  258. vellum_ee/workflows/display/tests/workflow_serialization/test_terminal_node_in_unused_graphs_serialization.py +53 -0
  259. vellum_ee/workflows/display/utils/exceptions.py +34 -0
  260. vellum_ee/workflows/display/utils/expressions.py +463 -52
  261. vellum_ee/workflows/display/utils/metadata.py +98 -33
  262. vellum_ee/workflows/display/utils/tests/test_metadata.py +31 -0
  263. vellum_ee/workflows/display/utils/triggers.py +153 -0
  264. vellum_ee/workflows/display/utils/vellum.py +59 -5
  265. vellum_ee/workflows/display/workflows/base_workflow_display.py +656 -254
  266. vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py +26 -0
  267. vellum_ee/workflows/display/workflows/tests/test_workflow_display.py +77 -29
  268. vellum_ee/workflows/server/namespaces.py +18 -0
  269. vellum_ee/workflows/tests/test_display_meta.py +2 -0
  270. vellum_ee/workflows/tests/test_serialize_module.py +174 -7
  271. vellum_ee/workflows/tests/test_server.py +0 -3
  272. vellum_ee/workflows/display/nodes/vellum/function_node.py +0 -14
  273. {vellum_ai-1.11.2.dist-info → vellum_ai-1.13.5.dist-info}/LICENSE +0 -0
  274. {vellum_ai-1.11.2.dist-info → vellum_ai-1.13.5.dist-info}/WHEEL +0 -0
  275. {vellum_ai-1.11.2.dist-info → vellum_ai-1.13.5.dist-info}/entry_points.txt +0 -0
@@ -4,22 +4,41 @@ import fnmatch
 from functools import cached_property
 import importlib
 import inspect
-import json
 import logging
 import os
+import pkgutil
+import re
 import traceback
 from uuid import UUID
-from typing import Any, Dict, ForwardRef, Generic, List, Optional, Set, Tuple, Type, TypeVar, Union, cast, get_args
+from typing import (
+    Any,
+    Dict,
+    ForwardRef,
+    FrozenSet,
+    Generic,
+    List,
+    Literal,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    get_args,
+)
+
+import jsonschema

 from vellum.client import Vellum as VellumClient
 from vellum.client.core.pydantic_utilities import UniversalBaseModel
-from vellum.utils.json_encoder import VellumJsonEncoder
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.constants import undefined
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.edges import Edge
 from vellum.workflows.edges.trigger_edge import TriggerEdge
 from vellum.workflows.events.workflow import NodeEventDisplayContext, WorkflowEventDisplayContext
+from vellum.workflows.exceptions import WorkflowInitializationException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.inputs.dataset_row import DatasetRow
 from vellum.workflows.nodes.bases import BaseNode
@@ -27,15 +46,15 @@ from vellum.workflows.nodes.displayable.bases.utils import primitive_to_vellum_v
 from vellum.workflows.nodes.displayable.final_output_node.node import FinalOutputNode
 from vellum.workflows.nodes.utils import get_unadorned_node, get_unadorned_port, get_wrapped_node
 from vellum.workflows.ports import Port
-from vellum.workflows.references import OutputReference, WorkflowInputReference
+from vellum.workflows.references import OutputReference, StateValueReference, WorkflowInputReference
+from vellum.workflows.triggers.base import BaseTrigger
+from vellum.workflows.triggers.chat_message import ChatMessageTrigger
 from vellum.workflows.triggers.integration import IntegrationTrigger
 from vellum.workflows.triggers.manual import ManualTrigger
-from vellum.workflows.triggers.schedule import ScheduleTrigger
 from vellum.workflows.types.core import Json, JsonArray, JsonObject
 from vellum.workflows.types.generics import WorkflowType
 from vellum.workflows.types.utils import get_original_base
-from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type
+from vellum.workflows.utils.uuids import generate_entity_id_from_path, uuid4_from_hash
 from vellum.workflows.vellum_client import create_vellum_client
 from vellum_ee.workflows.display.base import (
     EdgeDisplay,
@@ -52,7 +71,6 @@ from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay, PortDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
-from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import (
     EdgeDisplays,
     EntrypointDisplays,
@@ -65,16 +83,27 @@ from vellum_ee.workflows.display.types import (
     WorkflowOutputDisplays,
 )
 from vellum_ee.workflows.display.utils.auto_layout import auto_layout_nodes
-from vellum_ee.workflows.display.utils.exceptions import UserFacingException
+from vellum_ee.workflows.display.utils.exceptions import (
+    StateValidationError,
+    TriggerValidationError,
+    UserFacingException,
+    WorkflowValidationError,
+)
 from vellum_ee.workflows.display.utils.expressions import serialize_value
 from vellum_ee.workflows.display.utils.metadata import (
     get_entrypoint_edge_id,
     get_regular_edge_id,
     get_trigger_edge_id,
     load_dataset_row_index_to_id_mapping,
+    load_runner_config,
 )
 from vellum_ee.workflows.display.utils.registry import register_workflow_display_class
-from vellum_ee.workflows.display.utils.vellum import infer_vellum_variable_type
+from vellum_ee.workflows.display.utils.triggers import (
+    get_trigger_type,
+    serialize_trigger_attributes,
+    serialize_trigger_display_data,
+)
+from vellum_ee.workflows.display.utils.vellum import compile_descriptor_annotation, infer_vellum_variable_type
 from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display

 logger = logging.getLogger(__name__)
@@ -100,7 +129,39 @@ class WorkflowSerializationResult(UniversalBaseModel):
     dataset: Optional[List[Dict[str, Any]]] = None


-class BaseWorkflowDisplay(Generic[WorkflowType]):
+BASE_MODULE_PATH = __name__
+
+
+class _BaseWorkflowDisplayMeta(type):
+    def __new__(mcs, name: str, bases: Tuple[Type[Any], ...], attrs: Dict[str, Any]) -> Type[Any]:
+        cls = super().__new__(mcs, name, bases, attrs)
+
+        # Automatically import all of the node displays now that we don't require the __init__.py file
+        # to do so for us.
+        module_path = cls.__module__
+        if module_path.startswith(BASE_MODULE_PATH):
+            return cls
+
+        nodes_module_path = re.sub(r"\.workflow$", ".nodes", module_path)
+        try:
+            nodes_module = importlib.import_module(nodes_module_path)
+        except Exception:
+            # likely because there are no `.nodes` module in the display workflow's module path
+            return cls
+
+        if not hasattr(nodes_module, "__path__") or not hasattr(nodes_module, "__name__"):
+            return cls
+
+        for info in pkgutil.iter_modules(nodes_module.__path__, nodes_module.__name__ + "."):
+            try:
+                importlib.import_module(info.name)
+            except Exception:
+                continue
+
+        return cls
+
+
+class BaseWorkflowDisplay(Generic[WorkflowType], metaclass=_BaseWorkflowDisplayMeta):
     # Used to specify the display data for a workflow.
     workflow_display: Optional[WorkflowMetaDisplay] = None

@@ -144,6 +205,13 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
         self._dry_run = dry_run

     def serialize(self) -> JsonObject:
+        try:
+            self._workflow.validate()
+        except WorkflowInitializationException as e:
+            self.display_context.add_error(
+                WorkflowValidationError(message=e.message, workflow_class_name=self._workflow.__name__)
+            )
+
         self._serialized_files = [
             "__init__.py",
             "display/*",
@@ -164,6 +232,8 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):

             is_required = self._is_reference_required(workflow_input_reference)

+            schema = compile_descriptor_annotation(workflow_input_reference)
+
             input_variables.append(
                 {
                     "id": str(workflow_input_display.id),
@@ -172,6 +242,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                     "default": default.dict() if default else None,
                     "required": is_required,
                     "extensions": {"color": workflow_input_display.color},
+                    "schema": schema,
                 }
             )

@@ -199,17 +270,79 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
         serialized_nodes: Dict[UUID, JsonObject] = {}
         edges: JsonArray = []

-        # Get all trigger edges from the workflow's subgraphs to check if trigger exists
+        # Detect duplicate graph paths in the top-level set
+        # Signature includes: regular edges (with port identity) + trigger edges
+        seen_graph_signatures: Set[FrozenSet[Tuple[Literal["regular", "trigger"], int, Type[BaseNode]]]] = set()
+        seen_trigger_edges: Set[Tuple[Type[BaseTrigger], Type[BaseNode]]] = set()
         trigger_edges: List[TriggerEdge] = []
         for subgraph in self._workflow.get_subgraphs():
-            trigger_edges.extend(list(subgraph.trigger_edges))
+            # Build signature from regular edges (include port identity to distinguish different ports)
+            edge_signature: Set[Tuple[Any, ...]] = set()
+            for edge in subgraph.edges:
+                # Use port identity (id(port)) to distinguish different ports from the same node
+                edge_signature.add(("regular", id(edge.from_port), get_unadorned_node(edge.to_node)))
+
+            # Include trigger edges in the signature
+            for trigger_edge in subgraph.trigger_edges:
+                edge_signature.add(
+                    ("trigger", id(trigger_edge.trigger_class), get_unadorned_node(trigger_edge.to_node))
+                )
+
+            frozen_signature = frozenset(edge_signature)
+            if frozen_signature and frozen_signature in seen_graph_signatures:
+                self.display_context.add_validation_error(
+                    WorkflowValidationError(
+                        message="Duplicate graph path detected in workflow",
+                        workflow_class_name=self._workflow.__name__,
+                    )
+                )
+            elif frozen_signature:
+                seen_graph_signatures.add(frozen_signature)
+
+            # Collect and deduplicate trigger edges (for the trigger_edges list only)
+            for trigger_edge in subgraph.trigger_edges:
+                edge_key = (trigger_edge.trigger_class, get_unadorned_node(trigger_edge.to_node))
+                if edge_key not in seen_trigger_edges:
+                    seen_trigger_edges.add(edge_key)
+                    trigger_edges.append(trigger_edge)

         # Determine if we need an ENTRYPOINT node and what ID to use
         manual_trigger_edges = [edge for edge in trigger_edges if issubclass(edge.trigger_class, ManualTrigger)]
         has_manual_trigger = len(manual_trigger_edges) > 0

+        # Determine which nodes have explicit non-trigger entrypoints in the graph
+        # This is used to decide whether to create an ENTRYPOINT node and skip entrypoint edges
+        non_trigger_entrypoint_nodes: Set[Type[BaseNode]] = set()
+        for subgraph in self._workflow.get_subgraphs():
+            if any(True for _ in subgraph.trigger_edges):
+                continue
+            for entrypoint in subgraph.entrypoints:
+                try:
+                    non_trigger_entrypoint_nodes.add(get_unadorned_node(entrypoint))
+                except Exception:
+                    continue
+
+        # Determine if we need an ENTRYPOINT node:
+        # - ManualTrigger: always need ENTRYPOINT (backward compatibility)
+        # - No triggers: always need ENTRYPOINT (traditional workflows)
+        # - Non-trigger entrypoints exist: need ENTRYPOINT for those branches
+        # - Only non-manual triggers with no regular entrypoints: skip ENTRYPOINT
+        has_triggers = len(trigger_edges) > 0
+        needs_entrypoint_node = has_manual_trigger or not has_triggers or len(non_trigger_entrypoint_nodes) > 0
+
+        # Validate that the workflow has at least one trigger or entrypoint node
+        if not has_triggers and len(non_trigger_entrypoint_nodes) == 0:
+            self.display_context.add_validation_error(
+                WorkflowValidationError(
+                    message="Workflow has no triggers and no entrypoint nodes. "
+                    "A workflow must have at least one trigger or one node in its graph.",
+                    workflow_class_name=self._workflow.__name__,
+                )
+            )
+
         entrypoint_node_id: Optional[UUID] = None
         entrypoint_node_source_handle_id: Optional[UUID] = None
+        entrypoint_node_display = self.display_context.workflow_display.entrypoint_node_display

         if has_manual_trigger:
             # ManualTrigger: use trigger ID for ENTRYPOINT node (backward compatibility)
@@ -226,28 +359,30 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                     "label": "Entrypoint Node",
                     "source_handle_id": str(entrypoint_node_source_handle_id),
                 },
-                "display_data": self.display_context.workflow_display.entrypoint_node_display.dict(),
+                "display_data": entrypoint_node_display.dict() if entrypoint_node_display else NodeDisplayData().dict(),
                 "base": None,
                 "definition": None,
             }
-        else:
-            # All other cases: use workflow_display ENTRYPOINT node
+        elif needs_entrypoint_node:
+            # No triggers or non-trigger entrypoints exist: use workflow_display ENTRYPOINT node
             entrypoint_node_id = self.display_context.workflow_display.entrypoint_node_id
             entrypoint_node_source_handle_id = self.display_context.workflow_display.entrypoint_node_source_handle_id

-            serialized_nodes[entrypoint_node_id] = {
-                "id": str(entrypoint_node_id),
-                "type": "ENTRYPOINT",
-                "inputs": [],
-                "data": {
-                    "label": "Entrypoint Node",
-                    "source_handle_id": str(entrypoint_node_source_handle_id),
-                },
-                "display_data": self.display_context.workflow_display.entrypoint_node_display.dict(),
-                "base": None,
-                "definition": None,
-            }
-        # else: has_only_integration_trigger without explicit entrypoint - no ENTRYPOINT node needed
+            if entrypoint_node_id is not None and entrypoint_node_source_handle_id is not None:
+                display_data = entrypoint_node_display.dict() if entrypoint_node_display else NodeDisplayData().dict()
+                serialized_nodes[entrypoint_node_id] = {
+                    "id": str(entrypoint_node_id),
+                    "type": "ENTRYPOINT",
+                    "inputs": [],
+                    "data": {
+                        "label": "Entrypoint Node",
+                        "source_handle_id": str(entrypoint_node_source_handle_id),
+                    },
+                    "display_data": display_data,
+                    "base": None,
+                    "definition": None,
+                }
+        # else: only non-manual triggers with no regular entrypoints - skip ENTRYPOINT node

         # Add all the nodes in the workflows
         for node in self._workflow.get_all_nodes():
@@ -256,7 +391,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
             try:
                 try:
                     node.__validate__()
-                except ValueError as validation_error:
+                except (ValueError, jsonschema.exceptions.SchemaError) as validation_error:
                     # Only collect node validation errors directly to errors list, don't raise them
                     self.display_context.add_validation_error(validation_error)

@@ -276,7 +411,6 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):

             serialized_nodes[dict_key] = serialized_node

-        synthetic_output_edges: JsonArray = []
         output_variables: JsonArray = []
         output_values: JsonArray = []
         final_output_nodes = [
@@ -284,98 +418,37 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
         ]
         final_output_node_outputs = {node.Outputs.value for node in final_output_nodes}
         unreferenced_final_output_node_outputs = final_output_node_outputs.copy()
-        final_output_node_base: JsonObject = {
-            "name": FinalOutputNode.__name__,
-            "module": cast(JsonArray, FinalOutputNode.__module__.split(".")),
-        }

-        # Add a synthetic Terminal Node and track the Workflow's output variables for each Workflow output
+        # Track the Workflow's output variables for each Workflow output
         for workflow_output, workflow_output_display in self.display_context.workflow_output_displays.items():
-            final_output_node_id = uuid4_from_hash(f"{self.workflow_id}|node_id|{workflow_output.name}")
             inferred_type = infer_vellum_variable_type(workflow_output)
             # Remove the terminal node output from the unreferenced set
             if isinstance(workflow_output.instance, OutputReference):
                 unreferenced_final_output_node_outputs.discard(workflow_output.instance)

-            if workflow_output.instance not in final_output_node_outputs:
-                # Create a synthetic terminal node only if there is no terminal node for this output
-                try:
-                    node_input = create_node_input(
-                        final_output_node_id,
-                        "node_input",
-                        # This is currently the wrapper node's output, but we want the wrapped node
-                        workflow_output.instance,
-                        self.display_context,
-                    )
-                except ValueError as e:
-                    raise ValueError(f"Failed to serialize output '{workflow_output.name}': {str(e)}") from e
-
-                source_node_display: Optional[BaseNodeDisplay]
-                if not node_input.value.rules:
-                    source_node_display = None
-                else:
-                    first_rule = node_input.value.rules[0]
-                    if first_rule.type == "NODE_OUTPUT":
-                        source_node_id = UUID(first_rule.data.node_id)
-                        try:
-                            source_node_display = [
-                                node_display
-                                for node_display in self.display_context.node_displays.values()
-                                if node_display.node_id == source_node_id
-                            ][0]
-                        except IndexError:
-                            source_node_display = None
-                    else:
-                        source_node_display = None
-
-                synthetic_target_handle_id = str(
-                    uuid4_from_hash(f"{self.workflow_id}|target_handle_id|{workflow_output_display.name}")
-                )
-                synthetic_display_data = NodeDisplayData().dict()
-                synthetic_node_label = "Final Output"
-                serialized_nodes[final_output_node_id] = {
-                    "id": str(final_output_node_id),
-                    "type": "TERMINAL",
-                    "data": {
-                        "label": synthetic_node_label,
-                        "name": workflow_output_display.name,
-                        "target_handle_id": synthetic_target_handle_id,
-                        "output_id": str(workflow_output_display.id),
-                        "output_type": inferred_type,
-                        "node_input_id": str(node_input.id),
-                    },
-                    "inputs": [node_input.dict()],
-                    "display_data": synthetic_display_data,
-                    "base": final_output_node_base,
-                    "definition": None,
-                }
-
-                if source_node_display:
-                    source_handle_id = source_node_display.get_source_handle_id(
-                        port_displays=self.display_context.port_displays
-                    )
-
-                    synthetic_output_edges.append(
-                        {
-                            "id": str(uuid4_from_hash(f"{self.workflow_id}|edge_id|{workflow_output_display.name}")),
-                            "source_node_id": str(source_node_display.node_id),
-                            "source_handle_id": str(source_handle_id),
-                            "target_node_id": str(final_output_node_id),
-                            "target_handle_id": synthetic_target_handle_id,
-                            "type": "DEFAULT",
-                        }
-                    )
-
-            elif isinstance(workflow_output.instance, OutputReference):
+            # Update the name of the terminal node if this output references a FinalOutputNode
+            if workflow_output.instance in final_output_node_outputs:
                 terminal_node_id = workflow_output.instance.outputs_class.__parent_class__.__id__
                 serialized_terminal_node = serialized_nodes.get(terminal_node_id)
-                if serialized_terminal_node and isinstance(serialized_terminal_node["data"], dict):
+                if (
+                    serialized_terminal_node
+                    and "data" in serialized_terminal_node
+                    and isinstance(serialized_terminal_node["data"], dict)
+                ):
                     serialized_terminal_node["data"]["name"] = workflow_output_display.name

+            try:
+                output_value = self.serialize_value(workflow_output.instance)
+            except UserFacingException as e:
+                self.display_context.add_error(
+                    UserFacingException(f"Failed to serialize output '{workflow_output.name}': {e}")
+                )
+                continue
+
             output_values.append(
                 {
                     "output_variable_id": str(workflow_output_display.id),
-                    "value": serialize_value(self.workflow_id, self.display_context, workflow_output.instance),
+                    "value": output_value,
                 }
             )

@@ -391,40 +464,49 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
        # raise a serialization error
        if len(unreferenced_final_output_node_outputs) > 0:
            self.display_context.add_error(
-                ValueError("Unable to serialize terminal nodes that are not referenced by workflow outputs.")
+                WorkflowValidationError(
+                    message="Unable to serialize terminal nodes that are not referenced by workflow outputs.",
+                    workflow_class_name=self._workflow.__name__,
+                )
            )

        # Identify nodes that already have trigger edges so we can avoid duplicating entrypoint edges
-        nodes_with_trigger_edges: Set[Type[BaseNode]] = set()
+        nodes_with_manual_trigger_edges: Set[Type[BaseNode]] = set()
+        nodes_with_non_manual_trigger_edges: Set[Type[BaseNode]] = set()
        for trigger_edge in trigger_edges:
            try:
-                nodes_with_trigger_edges.add(get_unadorned_node(trigger_edge.to_node))
+                unadorned_target_node = get_unadorned_node(trigger_edge.to_node)
            except Exception:
                continue

-        # Determine which nodes have explicit non-trigger entrypoints in the graph
-        non_trigger_entrypoint_nodes: Set[Type[BaseNode]] = set()
-        for subgraph in self._workflow.get_subgraphs():
-            # If the subgraph contains trigger edges, its entrypoints were derived from triggers
-            if any(True for _ in subgraph.trigger_edges):
+            if issubclass(trigger_edge.trigger_class, ManualTrigger):
+                nodes_with_manual_trigger_edges.add(unadorned_target_node)
+            else:
+                nodes_with_non_manual_trigger_edges.add(unadorned_target_node)
+
+        # Track nodes with explicit entrypoint overrides so we retain their edges even if they have triggers
+        entrypoint_override_nodes: Set[Type[BaseNode]] = set()
+        for entrypoint_node in self.entrypoint_displays.keys():
+            try:
+                entrypoint_override_nodes.add(get_unadorned_node(entrypoint_node))
+            except Exception:
                continue
-            for entrypoint in subgraph.entrypoints:
-                try:
-                    non_trigger_entrypoint_nodes.add(get_unadorned_node(entrypoint))
-                except Exception:
-                    continue

        # Add edges from entrypoint first to preserve expected ordering
+        # Note: non_trigger_entrypoint_nodes was computed earlier to determine if we need an ENTRYPOINT node

        for target_node, entrypoint_display in self.display_context.entrypoint_displays.items():
            unadorned_target_node = get_unadorned_node(target_node)

-            # Skip the auto-generated entrypoint edge when a trigger already targets this node,
-            # unless the graph explicitly defines a non-trigger entrypoint for it.
+            # Skip the auto-generated entrypoint edge when a manual trigger already targets this node or when a
+            # non-manual trigger targets it without an explicit entrypoint override, unless the graph explicitly
+            # defines a non-trigger entrypoint for it.
+            has_manual_trigger = unadorned_target_node in nodes_with_manual_trigger_edges
+            has_non_manual_trigger = unadorned_target_node in nodes_with_non_manual_trigger_edges
+            has_override = unadorned_target_node in entrypoint_override_nodes
            if (
-                unadorned_target_node in nodes_with_trigger_edges
-                and unadorned_target_node not in non_trigger_entrypoint_nodes
-            ):
+                has_manual_trigger or (has_non_manual_trigger and not has_override)
+            ) and unadorned_target_node not in non_trigger_entrypoint_nodes:
                continue

            # Skip edges to invalid nodes
@@ -446,16 +528,19 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                "target_handle_id": str(target_node_display.get_trigger_id()),
                "type": "DEFAULT",
            }
-            display_data = self._serialize_edge_display_data(entrypoint_display.edge_display)
-            if display_data is not None:
-                entrypoint_edge_dict["display_data"] = display_data
+            edge_display_data = self._serialize_edge_display_data(entrypoint_display.edge_display)
+            if edge_display_data is not None:
+                entrypoint_edge_dict["display_data"] = edge_display_data
            edges.append(entrypoint_edge_dict)

        # Then add trigger edges
        for trigger_edge in trigger_edges:
            target_node = trigger_edge.to_node
            unadorned_target_node = get_unadorned_node(target_node)
-            nodes_with_trigger_edges.add(unadorned_target_node)
+            if issubclass(trigger_edge.trigger_class, ManualTrigger):
+                nodes_with_manual_trigger_edges.add(unadorned_target_node)
+            else:
+                nodes_with_non_manual_trigger_edges.add(unadorned_target_node)

            # Skip edges to invalid nodes
            if self._is_node_invalid(unadorned_target_node):
@@ -482,17 +567,23 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
            # Prefer stable id from metadata mapping if present
            stable_edge_id = get_trigger_edge_id(trigger_class, unadorned_target_node, self._workflow.__module__)

+            # Generate a unique fallback edge ID using trigger_id and target_node_id
+            # This ensures multiple triggers targeting the same node get unique edge IDs
+            fallback_edge_id = uuid4_from_hash(
+                f"{self.workflow_id}|trigger_edge|{trigger_id}|{target_node_display.node_id}"
+            )
+
            trigger_edge_dict: Dict[str, Json] = {
-                "id": str(stable_edge_id) if stable_edge_id else str(target_entrypoint_display.edge_display.id),
+                "id": str(stable_edge_id) if stable_edge_id else str(fallback_edge_id),
                "source_node_id": str(source_node_id),
                "source_handle_id": str(source_handle_id),
                "target_node_id": str(target_node_display.node_id),
                "target_handle_id": str(target_node_display.get_trigger_id()),
                "type": "DEFAULT",
            }
-            display_data = self._serialize_edge_display_data(target_entrypoint_display.edge_display)
-            if display_data is not None:
-                trigger_edge_dict["display_data"] = display_data
+            trigger_edge_display_data = self._serialize_edge_display_data(target_entrypoint_display.edge_display)
+            if trigger_edge_display_data is not None:
+                trigger_edge_dict["display_data"] = trigger_edge_display_data
            edges.append(trigger_edge_dict)

        for (source_node_port, target_node), edge_display in self.display_context.edge_displays.items():
@@ -525,13 +616,11 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                ),
                "type": "DEFAULT",
            }
-            display_data = self._serialize_edge_display_data(edge_display)
-            if display_data is not None:
-                regular_edge_dict["display_data"] = display_data
+            regular_edge_display_data = self._serialize_edge_display_data(edge_display)
+            if regular_edge_display_data is not None:
+                regular_edge_dict["display_data"] = regular_edge_display_data
            edges.append(regular_edge_dict)

-        edges.extend(synthetic_output_edges)
-
        nodes_list = list(serialized_nodes.values())
        nodes_dict_list = [cast(Dict[str, Any], node) for node in nodes_list if isinstance(node, dict)]

@@ -585,7 +674,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):

        Returns:
            JsonArray with trigger data if a trigger is present, None otherwise.
-            Each trigger in the array has: id (UUID), type (str), attributes (list)
+            Each trigger in the array has: id (UUID), type (str), name (str), attributes (list)
        """
        # Get all trigger edges from the workflow's subgraphs
        trigger_edges = []
@@ -600,32 +689,35 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):

        trigger_type_mapping = get_trigger_type_mapping()
        serialized_triggers: List[JsonObject] = []
+        seen_trigger_names: Set[str] = set()

        for trigger_class in unique_trigger_classes:
-            # Get the trigger type from the mapping, or check if it's a subclass
+            # Get the trigger type from the mapping, or use the utility function
            trigger_type = trigger_type_mapping.get(trigger_class)
            if trigger_type is None:
-                # Check if it's a subclass of a known trigger type
-                if issubclass(trigger_class, ManualTrigger):
-                    trigger_type = WorkflowTriggerType.MANUAL
-                elif issubclass(trigger_class, IntegrationTrigger):
-                    trigger_type = WorkflowTriggerType.INTEGRATION
-                elif issubclass(trigger_class, ScheduleTrigger):
-                    trigger_type = WorkflowTriggerType.SCHEDULED
-                else:
-                    raise ValueError(
-                        f"Unknown trigger type: {trigger_class.__name__}. "
-                        f"Please add it to the trigger type mapping in get_trigger_type_mapping()."
-                    )
+                trigger_type = get_trigger_type(trigger_class)

            trigger_id = trigger_class.__id__

-            # Serialize trigger attributes like node outputs
+            # Determine trigger name from the trigger class's __trigger_name__ attribute
+            trigger_name = trigger_class.__trigger_name__
+
+            # Validate that trigger names are unique
+            if trigger_name in seen_trigger_names:
+                self.display_context.add_validation_error(
+                    TriggerValidationError(
+                        message=f"Duplicate trigger name '{trigger_name}' found. Each trigger must have a unique name.",
+                        trigger_class_name=trigger_class.__name__,
+                    )
+                )
+            seen_trigger_names.add(trigger_name)
+
+            # Serialize trigger attributes using the shared utility
+            trigger_attributes = serialize_trigger_attributes(trigger_class)
+
            trigger_data: JsonObject
            if trigger_type == WorkflowTriggerType.SCHEDULED:
-                # For scheduled triggers, attributes should be empty
-                # and cron/timezone should be top level
-
+                # For scheduled triggers, include cron/timezone at top level
                config_class = trigger_class.Config
                cron_value = getattr(config_class, "cron", None)
                timezone_value = getattr(config_class, "timezone", None)
@@ -633,34 +725,17 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                trigger_data = {
                    "id": str(trigger_id),
                    "type": trigger_type.value,
+                    "name": trigger_name,
                    "cron": cron_value,
                    "timezone": timezone_value,
-                    "attributes": [],
+                    "attributes": trigger_attributes,
                }
            else:
-                # For other triggers, serialize attributes from attribute_references as VellumVariables
-                attribute_references = trigger_class.attribute_references().values()
-                trigger_attributes: JsonArray = cast(
-                    JsonArray,
-                    [
-                        cast(
-                            JsonObject,
-                            {
-                                "id": str(reference.id),
-                                "key": reference.name,
-                                "type": primitive_type_to_vellum_variable_type(reference),
-                                "required": True,
-                                "default": None,
-                                "extensions": None,
-                            },
-                        )
-                        for reference in sorted(attribute_references, key=lambda ref: ref.name)
-                    ],
-                )
-
+                # For other triggers (integration, etc.)
                trigger_data = {
                    "id": str(trigger_id),
                    "type": trigger_type.value,
+                    "name": trigger_name,
                    "attributes": trigger_attributes,
                }

@@ -668,44 +743,16 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
                exec_config = self._serialize_integration_trigger_exec_config(trigger_class)
                trigger_data["exec_config"] = exec_config

-            # Serialize display_data from trigger's Display class
-            display_class = trigger_class.Display
-            display_data: JsonObject = {}
-
-            # Add label if present
-            if hasattr(display_class, "label") and display_class.label is not None:
-                display_data["label"] = display_class.label
-
-            # Add x and y coordinates if present
-            if (
-                hasattr(display_class, "x")
-                and display_class.x is not None
-                and hasattr(display_class, "y")
-                and display_class.y is not None
-            ):
-                display_data["position"] = {
-                    "x": display_class.x,
-                    "y": display_class.y,
-                }
-
-            # Add z index if present
-            if hasattr(display_class, "z_index") and display_class.z_index is not None:
-                display_data["z_index"] = display_class.z_index
+                # Validate trigger attributes against the expected types from the API
+                self._validate_integration_trigger_attributes(trigger_class, trigger_attributes)

-            # Add icon if present
-            if hasattr(display_class, "icon") and display_class.icon is not None:
-                display_data["icon"] = display_class.icon
+            if trigger_type == WorkflowTriggerType.CHAT_MESSAGE and issubclass(trigger_class, ChatMessageTrigger):
+                chat_exec_config = self._serialize_chat_message_trigger_exec_config(trigger_class)
+                if chat_exec_config:
+                    trigger_data["exec_config"] = chat_exec_config

-            # Add color if present
-            if hasattr(display_class, "color") and display_class.color is not None:
-                display_data["color"] = display_class.color
-
-            # Add comment if present
-            if hasattr(display_class, "comment") and display_class.comment is not None:
-                display_data["comment"] = {
-                    "value": display_class.comment.value,
-                    "expanded": display_class.comment.expanded,
-                }
+            # Serialize display_data using the shared utility
+            display_data = serialize_trigger_display_data(trigger_class, trigger_type)

            # Don't include display_data for manual triggers
            if display_data and trigger_type != WorkflowTriggerType.MANUAL:
@@ -774,6 +821,167 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
774
821
  },
775
822
  )
776
823
 
824
+ def _fetch_integration_trigger_definition(
825
+ self, provider: str, integration_name: str, trigger_slug: str
826
+ ) -> Optional[JsonObject]:
827
+ """
828
+ Fetch the trigger/tool definition from the API to get the expected attribute types.
829
+
830
+ Uses the client's integrations.retrieve_integration_tool_definition method.
831
+
832
+ Returns the tool definition with output_parameters (payload schema) if found, None otherwise.
833
+ For triggers, output_parameters contains the webhook payload schema, while input_parameters
834
+ contains setup/config arguments.
835
+ """
836
+ try:
837
+ tool_definition = self._client.integrations.retrieve_integration_tool_definition(
838
+ integration_name=integration_name,
839
+ integration_provider=provider,
840
+ tool_name=trigger_slug,
841
+ )
842
+ return cast(
843
+ JsonObject,
844
+ {
845
+ "name": tool_definition.name,
846
+ "output_parameters": tool_definition.output_parameters,
847
+ },
848
+ )
849
+ except Exception as e:
850
+ logger.warning(f"Error fetching tool definition for {trigger_slug}: {e}")
851
+ return None
852
+
+ def _validate_integration_trigger_attributes(
+ self,
+ trigger_class: Type[IntegrationTrigger],
+ trigger_attributes: JsonArray,
+ ) -> None:
+ """
+ Validate that the trigger attributes match the expected types from the API.
+
+ Raises TriggerValidationError if there's a type mismatch.
+ """
+ config_class = trigger_class.Config
+ provider = getattr(config_class, "provider", None)
+ if isinstance(provider, Enum):
+ provider = provider.value
+ elif provider is not None:
+ provider = str(provider)
+
+ slug = getattr(config_class, "slug", None)
+ integration_name = getattr(config_class, "integration_name", None)
+
+ if not provider or not slug or not integration_name:
+ return
+
+ trigger_def = self._fetch_integration_trigger_definition(provider, integration_name, slug)
+ if not trigger_def:
+ return
+
+ # output_parameters contains the webhook payload schema for triggers
+ # (input_parameters contains setup/config arguments like team_id)
+ output_parameters = trigger_def.get("output_parameters", {})
+ if not output_parameters or not isinstance(output_parameters, dict):
+ return
+
+ # output_parameters is a JSON Schema object with structure:
+ # {"type": "object", "properties": {"key": {"type": "string"}, ...}, "required": [...]}
+ properties = output_parameters.get("properties", {})
+ if not properties or not isinstance(properties, dict):
+ return
+
+ # Map JSON Schema types to Vellum attribute types
+ json_schema_to_vellum_type: Dict[str, str] = {
+ "string": "STRING",
+ "number": "NUMBER",
+ "integer": "NUMBER",
+ "boolean": "BOOLEAN",
+ "object": "JSON",
+ "array": "ARRAY",
+ }
+
+ expected_types_by_key: Dict[str, str] = {}
+ for key, param_info in properties.items():
+ if not isinstance(param_info, dict):
+ continue
+ param_type = param_info.get("type")
+ if isinstance(param_type, str):
+ vellum_type = json_schema_to_vellum_type.get(param_type)
+ if vellum_type:
+ expected_types_by_key[key] = vellum_type
+
+ for attr in trigger_attributes:
+ if not isinstance(attr, dict):
+ continue
+ attr_key = attr.get("key")
+ actual_type = attr.get("type")
+ if isinstance(attr_key, str) and isinstance(actual_type, str) and attr_key in expected_types_by_key:
+ expected_type = expected_types_by_key[attr_key]
+ if actual_type != expected_type:
+ raise TriggerValidationError(
+ message=f"Attribute '{attr_key}' has type '{actual_type}' but expected type '{expected_type}'. "
+ "The trigger configuration is invalid or contains unsupported values.",
+ trigger_class_name=trigger_class.__name__,
+ )
+
+ def _serialize_chat_message_trigger_exec_config(
+ self, trigger_class: Type[ChatMessageTrigger]
+ ) -> Optional[JsonObject]:
+ config_class = trigger_class.Config
+ output = getattr(config_class, "output", None)
+
+ if output is None:
+ self.display_context.add_validation_error(
+ TriggerValidationError(
+ message="Chat Trigger output must be specified.",
+ trigger_class_name=trigger_class.__name__,
+ )
+ )
+ return None
+
+ self._validate_chat_history_state(trigger_class)
+
+ serialized_output = serialize_value(
+ executable_id=trigger_class.__id__,
+ display_context=self.display_context,
+ value=output,
+ )
+
+ return cast(
+ JsonObject,
+ {
+ "output": serialized_output,
+ },
+ )
+
+ def _validate_chat_history_state(self, trigger_class: Type[ChatMessageTrigger]) -> None:
+ state_class = self._workflow.get_state_class()
+
+ if not hasattr(state_class, "chat_history"):
+ self.display_context.add_validation_error(
+ StateValidationError(
+ message=(
+ "Chat triggers require a `chat_history` state variable. "
+ "Add `chat_history: List[ChatMessage] = Field(default_factory=list)` to your state class."
+ ),
+ state_class_name=state_class.__name__,
+ attribute_name="chat_history",
+ )
+ )
+ return
+
+ chat_history_ref = getattr(state_class, "chat_history")
+ if chat_history_ref.instance is None:
+ self.display_context.add_validation_error(
+ StateValidationError(
+ message=(
+ "Chat triggers expect chat_history to default to an empty array. "
+ "Use `Field(default_factory=list)` instead of `= None`."
+ ),
+ state_class_name=state_class.__name__,
+ attribute_name="chat_history",
+ )
+ )
+
  @staticmethod
  def _model_dump(value: Any) -> Any:
  if hasattr(value, "model_dump"):
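As an aside, the type check performed by the new _validate_integration_trigger_attributes can be pictured with a small standalone sketch: JSON Schema property types from the trigger's payload schema map onto Vellum attribute types, and any serialized attribute whose declared type disagrees gets reported. The schema, attribute list, and find_type_mismatches helper below are invented for illustration only; the real method fetches the schema through the integrations client and raises TriggerValidationError rather than returning messages.

    # Hypothetical sketch of the schema-vs-attribute type check (not the real method).
    from typing import Dict, List

    JSON_SCHEMA_TO_VELLUM_TYPE: Dict[str, str] = {
        "string": "STRING",
        "number": "NUMBER",
        "integer": "NUMBER",
        "boolean": "BOOLEAN",
        "object": "JSON",
        "array": "ARRAY",
    }

    def find_type_mismatches(payload_schema: dict, attributes: List[dict]) -> List[str]:
        """Report attributes whose serialized type disagrees with the payload schema."""
        expected: Dict[str, str] = {}
        for key, info in payload_schema.get("properties", {}).items():
            vellum_type = JSON_SCHEMA_TO_VELLUM_TYPE.get(info.get("type", ""))
            if vellum_type:
                expected[key] = vellum_type

        mismatches: List[str] = []
        for attr in attributes:
            key, actual = attr.get("key"), attr.get("type")
            if key in expected and actual != expected[key]:
                mismatches.append(f"Attribute '{key}' has type '{actual}' but expected '{expected[key]}'")
        return mismatches

    schema = {"type": "object", "properties": {"team_id": {"type": "integer"}}}
    attrs = [{"key": "team_id", "type": "STRING"}]
    print(find_type_mismatches(schema, attrs))  # one mismatch: STRING vs NUMBER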
@@ -840,13 +1048,17 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  node: Type[BaseNode],
  node_display: BaseNodeDisplay,
  node_output_displays: Dict[OutputReference, NodeOutputDisplay],
+ node_displays: NodeDisplays,
+ errors: List[Exception],
  ):
  """This method recursively adds nodes wrapped in decorators to the node_output_displays dictionary."""
 
  inner_node = get_wrapped_node(node)
  if inner_node:
- inner_node_display = self._get_node_display(inner_node)
- self._enrich_global_node_output_displays(inner_node, inner_node_display, node_output_displays)
+ inner_node_display = node_displays.get(inner_node) or self._get_node_display(inner_node, errors)
+ self._enrich_global_node_output_displays(
+ inner_node, inner_node_display, node_output_displays, node_displays, errors
+ )
 
  for node_output in node.Outputs:
  if node_output in node_output_displays:
@@ -859,13 +1071,15 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  node: Type[BaseNode],
  node_display: BaseNodeDisplay,
  port_displays: Dict[Port, PortDisplay],
+ node_displays: NodeDisplays,
+ errors: List[Exception],
  ):
  """This method recursively adds nodes wrapped in decorators to the port_displays dictionary."""
 
  inner_node = get_wrapped_node(node)
  if inner_node:
- inner_node_display = self._get_node_display(inner_node)
- self._enrich_node_port_displays(inner_node, inner_node_display, port_displays)
+ inner_node_display = node_displays.get(inner_node) or self._get_node_display(inner_node, errors)
+ self._enrich_node_port_displays(inner_node, inner_node_display, port_displays, node_displays, errors)
 
  for port in node.Ports:
  if port in port_displays:
@@ -873,12 +1087,18 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
 
  port_displays[port] = node_display.get_node_port_display(port)
 
- def _get_node_display(self, node: Type[BaseNode]) -> BaseNodeDisplay:
+ def _get_node_display(self, node: Type[BaseNode], errors: List[Exception]) -> BaseNodeDisplay:
  node_display_class = get_node_display_class(node)
- return node_display_class()
+ node_display = node_display_class()
+ try:
+ node_display.build(client=self._client)
+ except Exception as e:
+ errors.append(e)
+ return node_display
 
  @cached_property
  def display_context(self) -> WorkflowDisplayContext:
+ errors: List[Exception] = []
  workflow_meta_display = self._generate_workflow_meta_display()
 
  global_node_output_displays: NodeOutputDisplays = (
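The reworked _get_node_display above also shows the error-collection pattern used throughout this change: displays are still constructed and returned, but failures in build() are appended to a shared errors list so serialization can continue and report every problem at once. A minimal sketch of that pattern, using a made-up FakeNodeDisplay instead of the real display classes:

    from typing import Any, List

    class FakeNodeDisplay:
        def __init__(self, name: str, should_fail: bool = False) -> None:
            self.name = name
            self.should_fail = should_fail

        def build(self, client: Any = None) -> None:
            if self.should_fail:
                raise ValueError(f"could not build display for {self.name}")

    def build_all(displays: List[FakeNodeDisplay]) -> List[Exception]:
        errors: List[Exception] = []
        for display in displays:
            try:
                display.build(client=None)
            except Exception as e:  # collect instead of raising
                errors.append(e)
        return errors

    errors = build_all([FakeNodeDisplay("a"), FakeNodeDisplay("b", should_fail=True)])
    print([str(e) for e in errors])  # ['could not build display for b']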
@@ -900,6 +1120,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  global_node_displays=global_node_displays,
  global_node_output_displays=global_node_output_displays,
  port_displays=port_displays,
+ errors=errors,
  )
 
  workflow_input_displays: WorkflowInputsDisplays = {}
@@ -921,6 +1142,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  )
  for state_value in self._workflow.get_state_class():
  state_value_display_overrides = self.state_value_displays.get(state_value)
+ self._validate_state_value_default(state_value, errors)
  state_value_display = self._generate_state_value_display(
  state_value, overrides=state_value_display_overrides
  )
@@ -985,19 +1207,37 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  port_displays=port_displays,
  workflow_display_class=self.__class__,
  dry_run=self._dry_run,
+ _errors=errors,
  )
 
  def _generate_workflow_meta_display(self) -> WorkflowMetaDisplay:
+ defaults = WorkflowMetaDisplay.get_default(self._workflow)
  overrides = self.workflow_display
- if overrides:
- return WorkflowMetaDisplay(
- entrypoint_node_id=overrides.entrypoint_node_id,
- entrypoint_node_source_handle_id=overrides.entrypoint_node_source_handle_id,
- entrypoint_node_display=overrides.entrypoint_node_display,
- display_data=overrides.display_data,
- )
 
- return WorkflowMetaDisplay.get_default(self._workflow)
+ if not overrides:
+ return defaults
+
+ # Merge overrides with defaults - if override provides None, fall back to default
+ entrypoint_node_id = (
+ overrides.entrypoint_node_id if overrides.entrypoint_node_id is not None else defaults.entrypoint_node_id
+ )
+ entrypoint_node_source_handle_id = (
+ overrides.entrypoint_node_source_handle_id
+ if overrides.entrypoint_node_source_handle_id is not None
+ else defaults.entrypoint_node_source_handle_id
+ )
+ entrypoint_node_display = (
+ overrides.entrypoint_node_display
+ if overrides.entrypoint_node_display is not None
+ else defaults.entrypoint_node_display
+ )
+
+ return WorkflowMetaDisplay(
+ entrypoint_node_id=entrypoint_node_id,
+ entrypoint_node_source_handle_id=entrypoint_node_source_handle_id,
+ entrypoint_node_display=entrypoint_node_display,
+ display_data=overrides.display_data,
+ )
 
  def _generate_workflow_input_display(
  self, workflow_input: WorkflowInputReference, overrides: Optional[WorkflowInputsDisplay] = None
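_generate_workflow_meta_display now merges user overrides with generated defaults rather than taking the override wholesale: any field the override leaves as None falls back to the default, while display_data is taken from the override as-is. A small sketch of that merge rule with a stand-in dataclass (Meta is hypothetical; the real code operates on WorkflowMetaDisplay):

    from dataclasses import dataclass
    from typing import Optional
    from uuid import UUID, uuid4

    @dataclass
    class Meta:
        entrypoint_node_id: Optional[UUID] = None
        display_data: Optional[dict] = None

    def merge(overrides: Optional[Meta], defaults: Meta) -> Meta:
        if overrides is None:
            return defaults
        return Meta(
            # A None override keeps the generated default.
            entrypoint_node_id=(
                overrides.entrypoint_node_id
                if overrides.entrypoint_node_id is not None
                else defaults.entrypoint_node_id
            ),
            # display_data mirrors the diff above: taken from the override verbatim.
            display_data=overrides.display_data,
        )

    defaults = Meta(entrypoint_node_id=uuid4(), display_data={"position": {"x": 0, "y": 0}})
    print(merge(Meta(), defaults).entrypoint_node_id == defaults.entrypoint_node_id)  # True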
@@ -1010,12 +1250,12 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  name = overrides.name
  color = overrides.color
  else:
- workflow_input_id = uuid4_from_hash(f"{self.workflow_id}|inputs|id|{workflow_input.name}")
+ workflow_input_id = workflow_input.id
 
  return WorkflowInputsDisplay(id=workflow_input_id, name=name, color=color)
 
  def _generate_state_value_display(
- self, state_value: BaseDescriptor, overrides: Optional[StateValueDisplay] = None
+ self, state_value: StateValueReference, overrides: Optional[StateValueDisplay] = None
  ) -> StateValueDisplay:
  state_value_id: UUID
  name = None
@@ -1025,10 +1265,25 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  name = overrides.name
  color = overrides.color
  else:
- state_value_id = uuid4_from_hash(f"{self.workflow_id}|state_values|id|{state_value.name}")
+ state_value_id = state_value.id
 
  return StateValueDisplay(id=state_value_id, name=name, color=color)
 
+ def _validate_state_value_default(self, state_value: StateValueReference, errors: List[Exception]) -> None:
+ default_value = state_value.instance
+
+ if isinstance(default_value, (list, dict, set)):
+ errors.append(
+ StateValidationError(
+ message=(
+ "Mutable default value detected. Use Field(default_factory=list) instead of = [] "
+ "to avoid shared mutable state between instances."
+ ),
+ state_class_name=state_value.state_class.__name__,
+ attribute_name=state_value.name,
+ )
+ )
+
  def _generate_entrypoint_display(
  self,
  entrypoint: Type[BaseNode],
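The new _validate_state_value_default flags bare mutable defaults such as `= []` on state attributes. The plain-Python sketch below (not the actual state classes) shows why: a class-level list is one object shared by every instance, while a default_factory builds a fresh list per instance, which is what the suggested Field(default_factory=list) achieves.

    from dataclasses import dataclass, field
    from typing import List

    class SharedDefault:
        chat_history: List[str] = []  # one list object shared by all instances

    a, b = SharedDefault(), SharedDefault()
    a.chat_history.append("hello")
    print(b.chat_history)  # ['hello'] - b sees a's message

    @dataclass
    class PerInstanceDefault:
        chat_history: List[str] = field(default_factory=list)  # fresh list per instance

    c, d = PerInstanceDefault(), PerInstanceDefault()
    c.chat_history.append("hello")
    print(d.chat_history)  # [] - state stays isolated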
@@ -1049,9 +1304,12 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  target_node_display = node_displays[entrypoint_target]
  target_node_id = target_node_display.node_id
 
- edge_display = edge_display_overrides or self._generate_edge_display_from_source(
- entrypoint_node_id, target_node_id
- )
+ if edge_display_overrides:
+ edge_display = edge_display_overrides
+ elif entrypoint_node_id is not None:
+ edge_display = self._generate_edge_display_from_source(entrypoint_node_id, target_node_id)
+ else:
+ edge_display = EdgeDisplay(id=uuid4_from_hash(f"{self.workflow_id}|id|{target_node_id}"))
 
  return EntrypointDisplay(id=entrypoint_id, edge_display=edge_display)
 
@@ -1123,6 +1381,13 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  input.name: display_context.workflow_input_displays[input].id
  for input in display_context.workflow_input_displays
  }
+
+ # Include trigger attributes in workflow_inputs so they appear in the executions list UI
+ for subgraph in self._workflow.get_subgraphs():
+ for trigger_class in subgraph.triggers:
+ for trigger_attr_ref in trigger_class:
+ if trigger_attr_ref.name not in workflow_inputs:
+ workflow_inputs[trigger_attr_ref.name] = trigger_attr_ref.id
  node_displays = {
  node.__id__: (node, display_context.node_displays[node]) for node in display_context.node_displays
  }
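A quick sketch of the merge added above: trigger attribute references contribute name -> id entries to workflow_inputs only when a declared workflow input has not already claimed that name. The names and ids below are invented stand-ins for the real attribute references.

    from typing import Dict
    from uuid import UUID, uuid4

    workflow_inputs: Dict[str, UUID] = {"query": uuid4()}  # ids from workflow_input_displays
    trigger_attributes = [("query", uuid4()), ("channel", uuid4())]  # (name, id) pairs from trigger classes

    for name, attr_id in trigger_attributes:
        if name not in workflow_inputs:  # declared inputs win on name collisions
            workflow_inputs[name] = attr_id

    print(sorted(workflow_inputs))  # ['channel', 'query']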
@@ -1171,8 +1436,12 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  global_node_displays: NodeDisplays,
  global_node_output_displays: NodeOutputDisplays,
  port_displays: PortDisplays,
+ errors: List[Exception],
  ) -> None:
- extracted_node_displays = self._extract_node_displays(node)
+ if node in node_displays:
+ return
+
+ extracted_node_displays = self._extract_node_displays(node, errors)
 
  for extracted_node, extracted_node_display in extracted_node_displays.items():
  if extracted_node not in node_displays:
@@ -1181,11 +1450,15 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  if extracted_node not in global_node_displays:
  global_node_displays[extracted_node] = extracted_node_display
 
- self._enrich_global_node_output_displays(node, extracted_node_displays[node], global_node_output_displays)
- self._enrich_node_port_displays(node, extracted_node_displays[node], port_displays)
+ self._enrich_global_node_output_displays(
+ node, extracted_node_displays[node], global_node_output_displays, node_displays, errors
+ )
+ self._enrich_node_port_displays(node, extracted_node_displays[node], port_displays, node_displays, errors)
 
- def _extract_node_displays(self, node: Type[BaseNode]) -> Dict[Type[BaseNode], BaseNodeDisplay]:
- node_display = self._get_node_display(node)
+ def _extract_node_displays(
+ self, node: Type[BaseNode], errors: List[Exception]
+ ) -> Dict[Type[BaseNode], BaseNodeDisplay]:
+ node_display = self._get_node_display(node, errors)
  additional_node_displays: Dict[Type[BaseNode], BaseNodeDisplay] = {
  node: node_display,
  }
@@ -1193,7 +1466,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  # Nodes wrapped in a decorator need to be in our node display dictionary for later retrieval
  inner_node = get_wrapped_node(node)
  if inner_node:
- inner_node_displays = self._extract_node_displays(inner_node)
+ inner_node_displays = self._extract_node_displays(inner_node, errors)
 
  for node, display in inner_node_displays.items():
  if node not in additional_node_displays:
@@ -1240,6 +1513,92 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  def _workflow(self) -> Type[WorkflowType]:
  return cast(Type[WorkflowType], self.__class__.infer_workflow_class())
 
+ @staticmethod
+ def _collect_node_classes_from_module(
+ module: Any,
+ expected_module_prefix: str,
+ ) -> List[Type[BaseNode]]:
+ """
+ Collect BaseNode subclasses defined in a module.
+
+ Args:
+ module: The imported module to scan
+ expected_module_prefix: Module path prefix to filter by (e.g., "my_module")
+
+ Returns:
+ List of BaseNode subclasses defined in the module
+ """
+ node_classes: List[Type[BaseNode]] = []
+ for name, attr in vars(module).items():
+ if name.startswith("_"):
+ continue
+
+ if not (inspect.isclass(attr) and issubclass(attr, BaseNode) and attr is not BaseNode):
+ continue
+
+ if not attr.__module__.startswith(expected_module_prefix):
+ continue
+
+ if "<locals>" in attr.__qualname__:
+ continue
+
+ node_classes.append(attr)
+
+ return node_classes
+
+ @staticmethod
+ def _find_orphan_nodes(
+ base_module: str,
+ workflow: Type[BaseWorkflow],
+ ) -> List[Type[BaseNode]]:
+ """
+ Find nodes defined in the workflow package but not included in graph or unused_graphs.
+
+ Scans both the workflow.py file and the nodes/ subpackage for BaseNode subclasses.
+
+ Args:
+ base_module: The base module path (e.g., "my_module")
+ workflow: The workflow class to check
+
+ Returns:
+ List of orphan node classes
+ """
+ workflow_nodes = set(workflow.get_all_nodes())
+ candidate_nodes: List[Type[BaseNode]] = []
+
+ workflow_module_path = f"{base_module}.workflow"
+ try:
+ workflow_module = importlib.import_module(workflow_module_path)
+ candidate_nodes.extend(BaseWorkflowDisplay._collect_node_classes_from_module(workflow_module, base_module))
+ except ImportError:
+ pass
+
+ nodes_package_path = f"{base_module}.nodes"
+ try:
+ nodes_package = importlib.import_module(nodes_package_path)
+ if hasattr(nodes_package, "__path__"):
+ for module_info in pkgutil.walk_packages(nodes_package.__path__, nodes_package.__name__ + "."):
+ try:
+ submodule = importlib.import_module(module_info.name)
+ candidate_nodes.extend(
+ BaseWorkflowDisplay._collect_node_classes_from_module(submodule, base_module)
+ )
+ except Exception:
+ continue
+ except ImportError:
+ pass
+
+ seen: Set[Type[BaseNode]] = set()
+ orphan_nodes: List[Type[BaseNode]] = []
+ for node in candidate_nodes:
+ if node in seen:
+ continue
+ seen.add(node)
+ if node not in workflow_nodes:
+ orphan_nodes.append(node)
+
+ return orphan_nodes
+
  @staticmethod
  def serialize_module(
  module: str,
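The two helpers added above implement an orphan-node scan: walk the workflow package, collect every BaseNode subclass defined under it, and report those that are not reachable from the workflow's graph or unused_graphs. A rough standalone sketch of the same idea, using a hypothetical PluginBase and package layout instead of BaseNode and a real workflow module:

    import importlib
    import inspect
    import pkgutil
    from typing import List, Set, Type

    class PluginBase:  # stand-in for BaseNode
        pass

    def collect_subclasses(module, prefix: str) -> List[Type[PluginBase]]:
        found = []
        for name, attr in vars(module).items():
            if name.startswith("_") or not inspect.isclass(attr):
                continue
            if issubclass(attr, PluginBase) and attr is not PluginBase and attr.__module__.startswith(prefix):
                found.append(attr)
        return found

    def find_orphans(base_module: str, registered: Set[Type[PluginBase]]) -> List[Type[PluginBase]]:
        candidates: List[Type[PluginBase]] = []
        try:
            package = importlib.import_module(f"{base_module}.nodes")
        except ImportError:
            return []
        for info in pkgutil.walk_packages(getattr(package, "__path__", []), package.__name__ + "."):
            try:
                candidates.extend(collect_subclasses(importlib.import_module(info.name), base_module))
            except Exception:
                continue  # a broken submodule should not abort the scan
        # de-duplicate while preserving order, then keep only unregistered classes
        return [cls for cls in dict.fromkeys(candidates) if cls not in registered]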
@@ -1265,12 +1624,24 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  dry_run=dry_run,
  )
 
+ orphan_nodes = BaseWorkflowDisplay._find_orphan_nodes(module, workflow)
+ for orphan_node in orphan_nodes:
+ workflow_display.display_context.add_validation_error(
+ WorkflowValidationError(
+ message=f"Node '{orphan_node.__name__}' is defined in the module but not included in "
+ "the workflow's graph or unused_graphs.",
+ workflow_class_name=workflow.__name__,
+ )
+ )
+
  exec_config = workflow_display.serialize()
  additional_files = workflow_display._gather_additional_module_files(module)
 
  if additional_files:
  exec_config["module_data"] = {"additional_files": cast(JsonObject, additional_files)}
 
+ exec_config["runner_config"] = load_runner_config(module)
+
  dataset = None
  try:
  sandbox_module_path = f"{module}.sandbox"
@@ -1281,27 +1652,38 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  dataset = []
  dataset_row_index_to_id = load_dataset_row_index_to_id_mapping(module)
  for i, inputs_obj in enumerate(dataset_attr):
- if isinstance(inputs_obj, DatasetRow):
- serialized_inputs = json.loads(json.dumps(inputs_obj.inputs, cls=VellumJsonEncoder))
- row_data = {"label": inputs_obj.label, "inputs": serialized_inputs}
- trigger_class = inputs_obj.workflow_trigger
- if trigger_class is not None:
- row_data["workflow_trigger_id"] = str(trigger_class.__id__)
- elif isinstance(inputs_obj, BaseInputs):
- serialized_inputs = json.loads(json.dumps(inputs_obj, cls=VellumJsonEncoder))
- row_data = {"label": f"Scenario {i + 1}", "inputs": serialized_inputs}
- else:
- continue
+ normalized_row = (
+ DatasetRow(label=f"Scenario {i + 1}", inputs=inputs_obj)
+ if isinstance(inputs_obj, BaseInputs)
+ else inputs_obj
+ )
+
+ row_data = normalized_row.model_dump(
+ mode="json",
+ by_alias=True,
+ exclude_none=True,
+ context={
+ "add_error": workflow_display.display_context.add_validation_error,
+ "serializer": workflow_display.serialize_value,
+ },
+ )
 
  if i in dataset_row_index_to_id:
  row_data["id"] = dataset_row_index_to_id[i]
  elif isinstance(inputs_obj, DatasetRow) and inputs_obj.id is not None:
  row_data["id"] = inputs_obj.id
+ else:
+ row_data["id"] = str(generate_entity_id_from_path(f"{module}.sandbox.dataset.{i}"))
 
  dataset.append(row_data)
- except (ImportError, AttributeError):
+ except ImportError:
+ # No sandbox module exists, which is fine
  pass
+ except Exception as e:
+ # Capture any other errors (AttributeError, TypeError, etc.) from sandbox module
+ workflow_display.display_context.add_validation_error(e)
 
+ all_errors = list(workflow_display.display_context.errors)
  return WorkflowSerializationResult(
  exec_config=exec_config,
  errors=[
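The dataset handling above replaces hand-rolled json serialization with pydantic's model_dump: raw BaseInputs entries are first normalized into DatasetRow objects with a generated label, then dumped with mode="json" and exclude_none=True. A simplified sketch with a stand-in Row model (DatasetRow's real fields and the serialization context are not reproduced here; note that the context= argument used above requires a recent pydantic v2 release):

    from typing import Any, Dict, List, Optional
    from pydantic import BaseModel

    class Row(BaseModel):  # stand-in for DatasetRow
        label: str
        inputs: Dict[str, Any]
        id: Optional[str] = None  # dropped by exclude_none when unset

    raw_entries: List[Any] = [{"query": "hello"}, Row(label="Named case", inputs={"query": "hi"})]

    dataset = []
    for i, entry in enumerate(raw_entries):
        row = entry if isinstance(entry, Row) else Row(label=f"Scenario {i + 1}", inputs=entry)
        dataset.append(row.model_dump(mode="json", exclude_none=True))

    print(dataset[0])  # {'label': 'Scenario 1', 'inputs': {'query': 'hello'}}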
@@ -1309,11 +1691,28 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  message=str(error),
  stacktrace="".join(traceback.format_exception(type(error), error, error.__traceback__)),
  )
- for error in workflow_display.display_context.errors
+ for error in all_errors
  ],
  dataset=dataset,
  )
 
+ def serialize_value(self, value: Any) -> Any:
+ return serialize_value(self.workflow_id, self.display_context, value)
+
+ _INCLUDED_FILE_EXTENSIONS = [".py"]
+ _INCLUDED_FILENAMES = ["metadata.json"]
+
+ @staticmethod
+ def should_include_file(filename: str) -> bool:
+ """Check if a file should be included based on its extension or filename.
+
+ This is used by both the serialization logic and the push API to ensure
+ consistency in which files are included in workflow artifacts.
+ """
+ if filename in BaseWorkflowDisplay._INCLUDED_FILENAMES:
+ return True
+ return any(filename.endswith(ext) for ext in BaseWorkflowDisplay._INCLUDED_FILE_EXTENSIONS)
+
  def _gather_additional_module_files(self, module_path: str) -> Dict[str, str]:
  workflow_module_path = f"{module_path}.workflow"
  workflow_module = importlib.import_module(workflow_module_path)
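should_include_file can also be called directly when mirroring what ends up in a pushed artifact: only .py files and metadata.json pass the filter. A usage sketch against a made-up module directory; the import path is assumed and may differ in your install.

    import os
    # import path assumed; adjust to wherever BaseWorkflowDisplay lives in your environment
    from vellum_ee.workflows.display.workflows import BaseWorkflowDisplay

    included = []
    for root, _, filenames in os.walk("path/to/my_workflow_module"):
        for filename in filenames:
            if BaseWorkflowDisplay.should_include_file(filename):
                included.append(os.path.join(root, filename))

    print(BaseWorkflowDisplay.should_include_file("workflow.py"))    # True
    print(BaseWorkflowDisplay.should_include_file("metadata.json"))  # True
    print(BaseWorkflowDisplay.should_include_file("notes.txt"))      # False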
@@ -1327,6 +1726,9 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
 
  for root, _, filenames in os.walk(module_dir):
  for filename in filenames:
+ if not self.should_include_file(filename):
+ continue
+
  file_path = os.path.join(root, filename)
  relative_path = os.path.relpath(file_path, start=module_dir)