vellum-ai 1.3.3__py3-none-any.whl → 1.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/client/types/function_definition.py +5 -0
  3. vellum/client/types/scenario_input_audio_variable_value.py +1 -1
  4. vellum/client/types/scenario_input_document_variable_value.py +1 -1
  5. vellum/client/types/scenario_input_image_variable_value.py +1 -1
  6. vellum/client/types/scenario_input_video_variable_value.py +1 -1
  7. vellum/workflows/events/node.py +1 -1
  8. vellum/workflows/events/tests/test_event.py +1 -1
  9. vellum/workflows/events/workflow.py +1 -1
  10. vellum/workflows/nodes/bases/base.py +2 -5
  11. vellum/workflows/nodes/core/map_node/node.py +8 -1
  12. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +2 -2
  13. vellum/workflows/nodes/displayable/guardrail_node/node.py +8 -3
  14. vellum/workflows/nodes/displayable/tool_calling_node/node.py +4 -0
  15. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +17 -2
  16. vellum/workflows/outputs/base.py +11 -11
  17. vellum/workflows/references/output.py +3 -5
  18. vellum/workflows/resolvers/resolver.py +18 -2
  19. vellum/workflows/resolvers/tests/test_resolver.py +121 -0
  20. vellum/workflows/runner/runner.py +17 -17
  21. vellum/workflows/state/encoder.py +0 -37
  22. vellum/workflows/utils/functions.py +35 -0
  23. vellum/workflows/workflows/base.py +9 -1
  24. {vellum_ai-1.3.3.dist-info → vellum_ai-1.3.5.dist-info}/METADATA +1 -1
  25. {vellum_ai-1.3.3.dist-info → vellum_ai-1.3.5.dist-info}/RECORD +39 -37
  26. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +18 -2
  27. vellum_ee/workflows/display/tests/test_base_workflow_display.py +99 -2
  28. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_prompt_node_serialization.py +1 -0
  29. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_parent_input.py +85 -0
  30. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +2 -1
  31. vellum_ee/workflows/display/tests/workflow_serialization/test_final_output_node_map_reference_serialization.py +88 -0
  32. vellum_ee/workflows/display/utils/events.py +1 -0
  33. vellum_ee/workflows/display/utils/expressions.py +56 -0
  34. vellum_ee/workflows/display/utils/tests/test_events.py +11 -1
  35. vellum_ee/workflows/display/utils/vellum.py +3 -1
  36. vellum_ee/workflows/display/workflows/base_workflow_display.py +41 -27
  37. {vellum_ai-1.3.3.dist-info → vellum_ai-1.3.5.dist-info}/LICENSE +0 -0
  38. {vellum_ai-1.3.3.dist-info → vellum_ai-1.3.5.dist-info}/WHEEL +0 -0
  39. {vellum_ai-1.3.3.dist-info → vellum_ai-1.3.5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,88 @@
1
+ from typing import List
2
+
3
+ from vellum.workflows import BaseWorkflow
4
+ from vellum.workflows.inputs.base import BaseInputs
5
+ from vellum.workflows.nodes import MapNode
6
+ from vellum.workflows.nodes.bases import BaseNode
7
+ from vellum.workflows.nodes.displayable import FinalOutputNode
8
+ from vellum.workflows.outputs import BaseOutputs
9
+ from vellum.workflows.state import BaseState
10
+ from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
11
+
12
+
13
+ class TestInputs(BaseInputs):
14
+ items: List[str]
15
+
16
+
17
+ class TestIteration(BaseNode):
18
+ item = MapNode.SubworkflowInputs.item
19
+ index = MapNode.SubworkflowInputs.index
20
+
21
+ class Outputs(BaseOutputs):
22
+ processed: str
23
+
24
+ def run(self) -> Outputs:
25
+ return self.Outputs(processed=f"processed_{self.item}_{self.index}")
26
+
27
+
28
+ class TestIterationSubworkflow(BaseWorkflow[MapNode.SubworkflowInputs, BaseState]):
29
+ graph = TestIteration
30
+
31
+ class Outputs(BaseOutputs):
32
+ processed = TestIteration.Outputs.processed
33
+
34
+
35
+ class TestMapNode(MapNode):
36
+ items = TestInputs.items
37
+ subworkflow = TestIterationSubworkflow
38
+
39
+
40
+ class TestFinalOutputNode(FinalOutputNode[BaseState, List[str]]):
41
+ class Outputs(FinalOutputNode.Outputs):
42
+ value = TestMapNode.Outputs.processed
43
+
44
+
45
+ class TestWorkflowWithFinalOutputReferencingMap(BaseWorkflow[TestInputs, BaseState]):
46
+ graph = TestMapNode >> TestFinalOutputNode
47
+
48
+ class Outputs(BaseOutputs):
49
+ final_result = TestFinalOutputNode.Outputs.value
50
+
51
+
52
+ def test_serialize_workflow__final_output_node_referencing_map_node():
53
+ """
54
+ Test that final output nodes referencing map node outputs have correct outputs structure.
55
+
56
+ This test verifies that when a FinalOutputNode references a MapNode output,
57
+ the serialized output contains proper NODE_OUTPUT references instead of None values.
58
+ This addresses the Agent Builder issue where final outputs showed value=None in the UI.
59
+ """
60
+ workflow_display = get_workflow_display(workflow_class=TestWorkflowWithFinalOutputReferencingMap)
61
+
62
+ # WHEN we serialize it
63
+ serialized_workflow: dict = workflow_display.serialize()
64
+
65
+ # THEN the final output node should have the correct outputs structure
66
+ workflow_raw_data = serialized_workflow["workflow_raw_data"]
67
+ map_node = next(node for node in workflow_raw_data["nodes"] if node["type"] == "MAP")
68
+ final_output_node = next(node for node in workflow_raw_data["nodes"] if node["type"] == "TERMINAL")
69
+
70
+ # AND the map node's subworkflow should have the one output variable
71
+ output_variable = next(iter(map_node["data"]["output_variables"]))
72
+ map_node_output_id = output_variable["id"]
73
+
74
+ # AND the final output node should have an outputs array with proper structure
75
+ assert "outputs" in final_output_node
76
+ outputs = final_output_node["outputs"]
77
+ assert len(outputs) == 1
78
+
79
+ output = outputs[0]
80
+ # AND the output should have the correct structure with NODE_OUTPUT reference instead of None
81
+ assert output["name"] == "value"
82
+ assert output["type"] == "JSON"
83
+
84
+ # AND the value should be a NODE_OUTPUT reference, not None
85
+ assert output["value"] is not None, f"Expected NODE_OUTPUT reference but got None. Full output: {output}"
86
+ assert output["value"]["type"] == "NODE_OUTPUT", f"Expected NODE_OUTPUT type but got {output['value']['type']}"
87
+ assert "node_id" in output["value"], f"Missing node_id in output value: {output['value']}"
88
+ assert output["value"]["node_output_id"] == map_node_output_id
@@ -36,6 +36,7 @@ def event_enricher(event: WorkflowExecutionInitiatedEvent) -> WorkflowExecutionI
36
36
  dry_run=True,
37
37
  )
38
38
  register_workflow_display_context(event.span_id, workflow_display.display_context)
39
+ event.body.display_context = workflow_display.get_event_display_context()
39
40
 
40
41
  if event.body.workflow_definition.is_dynamic or _should_mark_workflow_dynamic(event):
41
42
  register_workflow_display_class(workflow_definition, workflow_display.__class__)
@@ -1,8 +1,12 @@
1
1
  from dataclasses import asdict, is_dataclass
2
+ import inspect
3
+ from io import StringIO
4
+ import sys
2
5
  from typing import TYPE_CHECKING, Any, Dict, List, cast
3
6
 
4
7
  from pydantic import BaseModel
5
8
 
9
+ from vellum.client.types.function_definition import FunctionDefinition
6
10
  from vellum.client.types.logical_operator import LogicalOperator
7
11
  from vellum.workflows.descriptors.base import BaseDescriptor
8
12
  from vellum.workflows.expressions.accessor import AccessorExpression
@@ -48,13 +52,31 @@ from vellum.workflows.references.workflow_input import WorkflowInputReference
48
52
  from vellum.workflows.types.core import JsonArray, JsonObject
49
53
  from vellum.workflows.types.definition import DeploymentDefinition
50
54
  from vellum.workflows.types.generics import is_workflow_class
55
+ from vellum.workflows.utils.functions import compile_function_definition
51
56
  from vellum.workflows.utils.uuids import uuid4_from_hash
52
57
  from vellum_ee.workflows.display.utils.exceptions import UnsupportedSerializationException
58
+ from vellum_ee.workflows.server.virtual_file_loader import VirtualFileLoader
53
59
 
54
60
  if TYPE_CHECKING:
55
61
  from vellum_ee.workflows.display.types import WorkflowDisplayContext
56
62
 
57
63
 
64
+ def virtual_open(file_path: str, mode: str = "r"):
65
+ """
66
+ Open a file, checking VirtualFileFinder instances first before falling back to regular open().
67
+ """
68
+ for finder in sys.meta_path:
69
+ if hasattr(finder, "loader") and isinstance(finder.loader, VirtualFileLoader):
70
+ namespace = finder.loader.namespace
71
+ if file_path.startswith(namespace + "/"):
72
+ relative_path = file_path[len(namespace) + 1 :]
73
+ content = finder.loader._get_code(relative_path)
74
+ if content is not None:
75
+ return StringIO(content)
76
+
77
+ return open(file_path, mode)
78
+
79
+
58
80
  def convert_descriptor_to_operator(descriptor: BaseDescriptor) -> LogicalOperator:
59
81
  if isinstance(descriptor, EqualsExpression):
60
82
  return "="
@@ -399,6 +421,40 @@ def serialize_value(display_context: "WorkflowDisplayContext", value: Any) -> Js
399
421
  dict_value = value.model_dump()
400
422
  return serialize_value(display_context, dict_value)
401
423
 
424
+ if callable(value):
425
+ function_definition = compile_function_definition(value)
426
+ inputs = getattr(value, "__vellum_inputs__", {})
427
+
428
+ if inputs:
429
+ serialized_inputs = {}
430
+ for param_name, input_ref in inputs.items():
431
+ serialized_inputs[param_name] = serialize_value(display_context, input_ref)
432
+
433
+ model_data = function_definition.model_dump()
434
+ model_data["inputs"] = serialized_inputs
435
+ function_definition = FunctionDefinition.model_validate(model_data)
436
+
437
+ source_path = inspect.getsourcefile(value)
438
+ if source_path is not None:
439
+ with virtual_open(source_path) as f:
440
+ source_code = f.read()
441
+ else:
442
+ source_code = f"Source code not available for {value.__name__}"
443
+
444
+ return {
445
+ "type": "CONSTANT_VALUE",
446
+ "value": {
447
+ "type": "JSON",
448
+ "value": {
449
+ "type": "CODE_EXECUTION",
450
+ "name": function_definition.name,
451
+ "description": function_definition.description,
452
+ "definition": function_definition.model_dump(),
453
+ "src": source_code,
454
+ },
455
+ },
456
+ }
457
+
402
458
  if not isinstance(value, BaseDescriptor):
403
459
  vellum_value = primitive_to_vellum_value(value)
404
460
  return {
@@ -67,9 +67,14 @@ def test_event_enricher_static_workflow(is_dynamic: bool, expected_config: Optio
67
67
  # WHEN the event_enricher is called with mocked dependencies
68
68
  event_enricher(event)
69
69
 
70
- # AND workflow_version_exec_config is set to the expected config
70
+ # THEN workflow_version_exec_config is set to the expected config
71
71
  assert event.body.workflow_version_exec_config == expected_config
72
72
 
73
+ assert event.body.display_context is not None
74
+ assert hasattr(event.body.display_context, "node_displays")
75
+ assert hasattr(event.body.display_context, "workflow_inputs")
76
+ assert hasattr(event.body.display_context, "workflow_outputs")
77
+
73
78
 
74
79
  def test_event_enricher_marks_subworkflow_deployment_as_dynamic():
75
80
  """Test that event_enricher treats subworkflow deployments as dynamic."""
@@ -109,3 +114,8 @@ def test_event_enricher_marks_subworkflow_deployment_as_dynamic():
109
114
 
110
115
  assert hasattr(enriched_event.body, "workflow_version_exec_config")
111
116
  assert enriched_event.body.workflow_version_exec_config is not None
117
+
118
+ assert enriched_event.body.display_context is not None
119
+ assert hasattr(enriched_event.body.display_context, "node_displays")
120
+ assert hasattr(enriched_event.body.display_context, "workflow_inputs")
121
+ assert hasattr(enriched_event.body.display_context, "workflow_outputs")
@@ -102,7 +102,9 @@ def create_node_input_value_pointer_rule(
102
102
  if isinstance(value, OutputReference):
103
103
  if value not in display_context.global_node_output_displays:
104
104
  if issubclass(value.outputs_class, BaseNode.Outputs):
105
- raise ValueError(f"Reference to node '{value.outputs_class._node_class.__name__}' not found in graph.")
105
+ raise ValueError(
106
+ f"Reference to node '{value.outputs_class.__parent_class__.__name__}' not found in graph."
107
+ )
106
108
 
107
109
  raise ValueError(f"Reference to outputs '{value.outputs_class.__qualname__}' is invalid.")
108
110
 
@@ -294,7 +294,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
294
294
  )
295
295
 
296
296
  elif isinstance(workflow_output.instance, OutputReference):
297
- terminal_node_id = workflow_output.instance.outputs_class._node_class.__id__
297
+ terminal_node_id = workflow_output.instance.outputs_class.__parent_class__.__id__
298
298
  serialized_terminal_node = serialized_nodes.get(terminal_node_id)
299
299
  if serialized_terminal_node and isinstance(serialized_terminal_node["data"], dict):
300
300
  serialized_terminal_node["data"]["name"] = workflow_output_display.name
@@ -329,16 +329,18 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
329
329
  continue
330
330
 
331
331
  target_node_display = self.display_context.node_displays[unadorned_target_node]
332
- edges.append(
333
- {
334
- "id": str(entrypoint_display.edge_display.id),
335
- "source_node_id": str(entrypoint_node_id),
336
- "source_handle_id": str(entrypoint_node_source_handle_id),
337
- "target_node_id": str(target_node_display.node_id),
338
- "target_handle_id": str(target_node_display.get_trigger_id()),
339
- "type": "DEFAULT",
340
- }
341
- )
332
+ entrypoint_edge_dict: Dict[str, Json] = {
333
+ "id": str(entrypoint_display.edge_display.id),
334
+ "source_node_id": str(entrypoint_node_id),
335
+ "source_handle_id": str(entrypoint_node_source_handle_id),
336
+ "target_node_id": str(target_node_display.node_id),
337
+ "target_handle_id": str(target_node_display.get_trigger_id()),
338
+ "type": "DEFAULT",
339
+ }
340
+ display_data = self._serialize_edge_display_data(entrypoint_display.edge_display)
341
+ if display_data is not None:
342
+ entrypoint_edge_dict["display_data"] = display_data
343
+ edges.append(entrypoint_edge_dict)
342
344
 
343
345
  for (source_node_port, target_node), edge_display in self.display_context.edge_displays.items():
344
346
  unadorned_source_node_port = get_unadorned_port(source_node_port)
@@ -353,18 +355,20 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
353
355
  source_node_port_display = self.display_context.port_displays[unadorned_source_node_port]
354
356
  target_node_display = self.display_context.node_displays[unadorned_target_node]
355
357
 
356
- edges.append(
357
- {
358
- "id": str(edge_display.id),
359
- "source_node_id": str(source_node_port_display.node_id),
360
- "source_handle_id": str(source_node_port_display.id),
361
- "target_node_id": str(target_node_display.node_id),
362
- "target_handle_id": str(
363
- target_node_display.get_target_handle_id_by_source_node_id(source_node_port_display.node_id)
364
- ),
365
- "type": "DEFAULT",
366
- }
367
- )
358
+ regular_edge_dict: Dict[str, Json] = {
359
+ "id": str(edge_display.id),
360
+ "source_node_id": str(source_node_port_display.node_id),
361
+ "source_handle_id": str(source_node_port_display.id),
362
+ "target_node_id": str(target_node_display.node_id),
363
+ "target_handle_id": str(
364
+ target_node_display.get_target_handle_id_by_source_node_id(source_node_port_display.node_id)
365
+ ),
366
+ "type": "DEFAULT",
367
+ }
368
+ display_data = self._serialize_edge_display_data(edge_display)
369
+ if display_data is not None:
370
+ regular_edge_dict["display_data"] = display_data
371
+ edges.append(regular_edge_dict)
368
372
 
369
373
  edges.extend(synthetic_output_edges)
370
374
 
@@ -405,6 +409,12 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
405
409
  "output_variables": output_variables,
406
410
  }
407
411
 
412
+ def _serialize_edge_display_data(self, edge_display: EdgeDisplay) -> Optional[JsonObject]:
413
+ """Serialize edge display data, returning None if no display data is present."""
414
+ if edge_display.z_index is not None:
415
+ return {"z_index": edge_display.z_index}
416
+ return None
417
+
408
418
  def _apply_auto_layout(self, nodes_dict_list: List[Dict[str, Any]], edges: List[Json]) -> None:
409
419
  """Apply auto-layout to nodes that are all positioned at (0,0)."""
410
420
  nodes_for_layout: List[Tuple[str, NodeDisplayData]] = []
@@ -589,7 +599,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
589
599
 
590
600
  workflow_output_display = self.output_displays.get(workflow_output)
591
601
  workflow_output_displays[workflow_output] = (
592
- workflow_output_display or self._generate_workflow_output_display(workflow_output)
602
+ workflow_output_display or self._generate_workflow_output_display(workflow_output, self._workflow)
593
603
  )
594
604
 
595
605
  return WorkflowDisplayContext(
@@ -678,9 +688,13 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
678
688
 
679
689
  return EntrypointDisplay(id=entrypoint_id, edge_display=edge_display)
680
690
 
681
- def _generate_workflow_output_display(self, output: BaseDescriptor) -> WorkflowOutputDisplay:
682
- output_id = uuid4_from_hash(f"{self.workflow_id}|id|{output.name}")
683
-
691
+ def _generate_workflow_output_display(
692
+ self, output: OutputReference, workflow_class: Type[BaseWorkflow]
693
+ ) -> WorkflowOutputDisplay:
694
+ # TODO: use the output.id field instead once we add `__parent_class__` to BaseWorkflow.Outputs
695
+ output_id = workflow_class.__output_ids__.get(output.name) or uuid4_from_hash(
696
+ f"{self.workflow_id}|id|{output.name}"
697
+ )
684
698
  return WorkflowOutputDisplay(id=output_id, name=output.name)
685
699
 
686
700
  def __init_subclass__(cls, **kwargs: Any) -> None: