vellum-ai 1.7.3__py3-none-any.whl → 1.7.5__py3-none-any.whl

This diff reflects the changes between two publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (42)
  1. vellum/__init__.py +2 -0
  2. vellum/client/core/client_wrapper.py +2 -2
  3. vellum/client/core/pydantic_utilities.py +8 -1
  4. vellum/client/reference.md +95 -0
  5. vellum/client/resources/workflow_deployments/client.py +111 -0
  6. vellum/client/resources/workflow_deployments/raw_client.py +121 -0
  7. vellum/client/types/__init__.py +2 -0
  8. vellum/client/types/paginated_workflow_deployment_release_list.py +30 -0
  9. vellum/client/types/vellum_error_code_enum.py +2 -0
  10. vellum/client/types/vellum_sdk_error_code_enum.py +2 -0
  11. vellum/client/types/workflow_execution_event_error_code.py +2 -0
  12. vellum/plugins/pydantic.py +1 -0
  13. vellum/types/paginated_workflow_deployment_release_list.py +3 -0
  14. vellum/workflows/edges/__init__.py +2 -0
  15. vellum/workflows/edges/trigger_edge.py +67 -0
  16. vellum/workflows/errors/types.py +3 -0
  17. vellum/workflows/events/tests/test_event.py +41 -0
  18. vellum/workflows/events/workflow.py +15 -3
  19. vellum/workflows/exceptions.py +9 -1
  20. vellum/workflows/graph/graph.py +93 -0
  21. vellum/workflows/graph/tests/test_graph.py +167 -0
  22. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +11 -1
  23. vellum/workflows/nodes/displayable/tool_calling_node/node.py +1 -1
  24. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +1 -1
  25. vellum/workflows/ports/port.py +11 -0
  26. vellum/workflows/runner/runner.py +44 -6
  27. vellum/workflows/triggers/__init__.py +4 -0
  28. vellum/workflows/triggers/base.py +125 -0
  29. vellum/workflows/triggers/manual.py +37 -0
  30. vellum/workflows/types/core.py +2 -1
  31. vellum/workflows/workflows/base.py +9 -9
  32. {vellum_ai-1.7.3.dist-info → vellum_ai-1.7.5.dist-info}/METADATA +1 -1
  33. {vellum_ai-1.7.3.dist-info → vellum_ai-1.7.5.dist-info}/RECORD +42 -35
  34. vellum_ee/assets/node-definitions.json +1 -1
  35. vellum_ee/workflows/display/base.py +26 -1
  36. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +1 -1
  37. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +1 -1
  38. vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py +113 -0
  39. vellum_ee/workflows/display/workflows/base_workflow_display.py +63 -10
  40. {vellum_ai-1.7.3.dist-info → vellum_ai-1.7.5.dist-info}/LICENSE +0 -0
  41. {vellum_ai-1.7.3.dist-info → vellum_ai-1.7.5.dist-info}/WHEEL +0 -0
  42. {vellum_ai-1.7.3.dist-info → vellum_ai-1.7.5.dist-info}/entry_points.txt +0 -0
@@ -137,22 +137,34 @@ class WorkflowExecutionStreamingEvent(_BaseWorkflowEvent):
         return self.body.output
 
 
-class WorkflowExecutionFulfilledBody(_BaseWorkflowExecutionBody, Generic[OutputsType]):
+class WorkflowExecutionFulfilledBody(_BaseWorkflowExecutionBody, Generic[OutputsType, StateType]):
     outputs: OutputsType
+    final_state: Optional[StateType] = None
 
     @field_serializer("outputs")
     def serialize_outputs(self, outputs: OutputsType, _info: Any) -> Dict[str, Any]:
         return default_serializer(outputs)
 
+    @field_serializer("final_state")
+    def serialize_final_state(self, final_state: Optional[StateType], _info: Any) -> Optional[Dict[str, Any]]:
+        if final_state is None:
+            return None
+        state_dict = {k: v for k, v in final_state if k != "meta"}
+        return default_serializer(state_dict)
 
-class WorkflowExecutionFulfilledEvent(_BaseWorkflowEvent, Generic[OutputsType]):
+
+class WorkflowExecutionFulfilledEvent(_BaseWorkflowEvent, Generic[OutputsType, StateType]):
     name: Literal["workflow.execution.fulfilled"] = "workflow.execution.fulfilled"
-    body: WorkflowExecutionFulfilledBody[OutputsType]
+    body: WorkflowExecutionFulfilledBody[OutputsType, StateType]
 
     @property
     def outputs(self) -> OutputsType:
         return self.body.outputs
 
+    @property
+    def final_state(self) -> Optional[StateType]:
+        return self.body.final_state
+
 
 class WorkflowExecutionRejectedBody(_BaseWorkflowExecutionBody):
     error: WorkflowError
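
Note: with final_state on the fulfilled event, consumers can read a workflow's terminal state straight off the event stream. A minimal sketch, assuming a BaseWorkflow subclass named MyWorkflow (hypothetical, defined elsewhere) and its stream() entry point:

    # Hedged sketch: MyWorkflow is a placeholder BaseWorkflow subclass.
    workflow = MyWorkflow()
    for event in workflow.stream():
        if event.name == "workflow.execution.fulfilled":
            print(event.outputs)      # the workflow's typed Outputs, as before
            print(event.final_state)  # new in 1.7.5: the final State instance, or None
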
@@ -25,10 +25,12 @@ class NodeException(Exception):
         message: str,
         code: WorkflowErrorCode = WorkflowErrorCode.INTERNAL_ERROR,
         raw_data: Optional[Dict[str, Any]] = None,
+        stacktrace: Optional[str] = None,
     ):
         self.message = message
         self.code = code
         self.raw_data = raw_data
+        self.stacktrace = stacktrace
         super().__init__(message)
 
     @property
@@ -37,11 +39,17 @@ class NodeException(Exception):
             message=self.message,
             code=self.code,
             raw_data=self.raw_data,
+            stacktrace=self.stacktrace,
         )
 
     @staticmethod
     def of(workflow_error: WorkflowError) -> "NodeException":
-        return NodeException(message=workflow_error.message, code=workflow_error.code, raw_data=workflow_error.raw_data)
+        return NodeException(
+            message=workflow_error.message,
+            code=workflow_error.code,
+            raw_data=workflow_error.raw_data,
+            stacktrace=workflow_error.stacktrace,
+        )
 
 
 class WorkflowInitializationException(Exception):
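
Note: the constructor change above lets callers attach a captured stacktrace directly, and NodeException.of() now preserves it when rebuilding an exception from a WorkflowError. A minimal sketch using only the signature shown in this hunk (the call site itself is illustrative):

    import traceback

    from vellum.workflows.errors import WorkflowErrorCode
    from vellum.workflows.exceptions import NodeException

    try:
        raise NodeException(
            message="Downstream call failed",
            code=WorkflowErrorCode.INTERNAL_ERROR,
            stacktrace="".join(traceback.format_stack()),  # new in 1.7.5
        )
    except NodeException as exc:
        print(exc.stacktrace)  # preserved alongside message, code, and raw_data
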
@@ -3,11 +3,13 @@ from typing import TYPE_CHECKING, Iterator, List, Optional, Set, Type, Union
 from orderly_set import OrderedSet
 
 from vellum.workflows.edges.edge import Edge
+from vellum.workflows.edges.trigger_edge import TriggerEdge
 from vellum.workflows.types.generics import NodeType
 
 if TYPE_CHECKING:
     from vellum.workflows.nodes.bases.base import BaseNode
     from vellum.workflows.ports.port import Port
+    from vellum.workflows.triggers.base import BaseTrigger
 
 
 class NoPortsNode:
@@ -46,16 +48,19 @@ class Graph:
     _entrypoints: Set[Union["Port", "NoPortsNode"]]
     _edges: List[Edge]
     _terminals: Set[Union["Port", "NoPortsNode"]]
+    _trigger_edges: List[TriggerEdge]
 
     def __init__(
         self,
         entrypoints: Set[Union["Port", "NoPortsNode"]],
         edges: List[Edge],
         terminals: Set[Union["Port", "NoPortsNode"]],
+        trigger_edges: Optional[List[TriggerEdge]] = None,
     ):
         self._edges = edges
         self._entrypoints = entrypoints
         self._terminals = terminals
+        self._trigger_edges = trigger_edges or []
 
     @staticmethod
     def from_port(port: "Port") -> "Graph":
@@ -96,12 +101,79 @@ class Graph:
     def from_edge(edge: Edge) -> "Graph":
         return Graph(entrypoints={edge.from_port}, edges=[edge], terminals={port for port in edge.to_node.Ports})
 
+    @staticmethod
+    def from_trigger_edge(edge: TriggerEdge) -> "Graph":
+        """
+        Create a graph from a single TriggerEdge (Trigger >> Node).
+
+        Args:
+            edge: TriggerEdge connecting a trigger to a node
+
+        Returns:
+            Graph with the trigger edge and the target node's ports as terminals
+        """
+        ports = {port for port in edge.to_node.Ports}
+        if not ports:
+            no_ports_node = NoPortsNode(edge.to_node)
+            return Graph(
+                entrypoints={no_ports_node},
+                edges=[],
+                terminals={no_ports_node},
+                trigger_edges=[edge],
+            )
+        return Graph(
+            entrypoints=set(ports),
+            edges=[],
+            terminals=set(ports),
+            trigger_edges=[edge],
+        )
+
+    @staticmethod
+    def from_trigger_edges(edges: List[TriggerEdge]) -> "Graph":
+        """
+        Create a graph from multiple TriggerEdges (e.g., Trigger >> {NodeA, NodeB}).
+
+        Args:
+            edges: List of TriggerEdges
+
+        Returns:
+            Graph with all trigger edges and target nodes' ports as entrypoints/terminals
+        """
+        entrypoints: Set[Union["Port", NoPortsNode]] = set()
+        terminals: Set[Union["Port", NoPortsNode]] = set()
+
+        for edge in edges:
+            ports = {port for port in edge.to_node.Ports}
+            if not ports:
+                no_ports_node = NoPortsNode(edge.to_node)
+                entrypoints.add(no_ports_node)
+                terminals.add(no_ports_node)
+            else:
+                entrypoints.update(ports)
+                terminals.update(ports)
+
+        return Graph(
+            entrypoints=entrypoints,
+            edges=[],
+            terminals=terminals,
+            trigger_edges=edges,
+        )
+
     @staticmethod
     def empty() -> "Graph":
         """Create an empty graph with no entrypoints, edges, or terminals."""
         return Graph(entrypoints=set(), edges=[], terminals=set())
 
     def __rshift__(self, other: GraphTarget) -> "Graph":
+        # Check for trigger target (class-level only)
+        from vellum.workflows.triggers.base import BaseTrigger
+
+        if isinstance(other, type) and issubclass(other, BaseTrigger):
+            raise TypeError(
+                f"Cannot create edge targeting trigger {other.__name__}. "
+                f"Triggers must be at the start of a graph path, not as targets."
+            )
+
         if not self._edges and not self._entrypoints:
             raise ValueError("Graph instance can only create new edges from nodes within existing edges")
 
@@ -179,9 +251,30 @@ class Graph:
     def edges(self) -> Iterator[Edge]:
         return iter(self._edges)
 
+    @property
+    def trigger_edges(self) -> Iterator[TriggerEdge]:
+        """Get all trigger edges in this graph."""
+        return iter(self._trigger_edges)
+
+    @property
+    def triggers(self) -> Iterator[Type["BaseTrigger"]]:
+        """Get all unique trigger classes in this graph."""
+        seen_triggers = set()
+        for trigger_edge in self._trigger_edges:
+            if trigger_edge.trigger_class not in seen_triggers:
+                seen_triggers.add(trigger_edge.trigger_class)
+                yield trigger_edge.trigger_class
+
     @property
     def nodes(self) -> Iterator[Type["BaseNode"]]:
         nodes = set()
+
+        # Include nodes from trigger edges
+        for trigger_edge in self._trigger_edges:
+            if trigger_edge.to_node not in nodes:
+                nodes.add(trigger_edge.to_node)
+                yield trigger_edge.to_node
+
         if not self._edges:
             for node in self.entrypoints:
                 if node not in nodes:
@@ -1,7 +1,10 @@
+import pytest
+
 from vellum.workflows.edges.edge import Edge
 from vellum.workflows.graph.graph import Graph
 from vellum.workflows.nodes.bases.base import BaseNode
 from vellum.workflows.ports.port import Port
+from vellum.workflows.triggers import ManualTrigger
 
 
 def test_graph__empty():
@@ -617,3 +620,167 @@ def test_graph__from_node_with_empty_ports():
 
     # THEN the graph should have exactly 1 node
     assert len(list(graph.nodes)) == 1
+
+
+def test_graph__manual_trigger_to_node():
+    # GIVEN a node
+    class MyNode(BaseNode):
+        pass
+
+    # WHEN we create graph with ManualTrigger >> Node (class-level, no instantiation)
+    graph = ManualTrigger >> MyNode
+
+    # THEN the graph has one trigger edge
+    trigger_edges = list(graph.trigger_edges)
+    assert len(trigger_edges) == 1
+    assert trigger_edges[0].trigger_class == ManualTrigger
+    assert trigger_edges[0].to_node == MyNode
+
+    # AND the graph has one trigger
+    triggers = list(graph.triggers)
+    assert len(triggers) == 1
+    assert triggers[0] == ManualTrigger
+
+    # AND the graph has one node
+    assert len(list(graph.nodes)) == 1
+    assert MyNode in list(graph.nodes)
+
+
+def test_graph__manual_trigger_to_set_of_nodes():
+    # GIVEN two nodes
+    class NodeA(BaseNode):
+        pass
+
+    class NodeB(BaseNode):
+        pass
+
+    # WHEN we create graph with ManualTrigger >> {NodeA, NodeB}
+    graph = ManualTrigger >> {NodeA, NodeB}
+
+    # THEN the graph has two trigger edges
+    trigger_edges = list(graph.trigger_edges)
+    assert len(trigger_edges) == 2
+
+    # AND both edges connect to the same ManualTrigger class
+    assert all(edge.trigger_class == ManualTrigger for edge in trigger_edges)
+
+    # AND edges connect to both nodes
+    target_nodes = {edge.to_node for edge in trigger_edges}
+    assert target_nodes == {NodeA, NodeB}
+
+    # AND the graph has one unique trigger
+    triggers = list(graph.triggers)
+    assert len(triggers) == 1
+
+    # AND the graph has two nodes
+    assert len(list(graph.nodes)) == 2
+
+
+def test_graph__manual_trigger_to_graph():
+    # GIVEN a graph of nodes
+    class NodeA(BaseNode):
+        pass
+
+    class NodeB(BaseNode):
+        pass
+
+    node_graph = NodeA >> NodeB
+
+    # WHEN we create graph with ManualTrigger >> Graph
+    graph = ManualTrigger >> node_graph
+
+    # THEN the graph has a trigger edge to the entrypoint
+    trigger_edges = list(graph.trigger_edges)
+    assert len(trigger_edges) == 1
+    assert trigger_edges[0].to_node == NodeA
+
+    # AND the graph preserves the original edges
+    edges = list(graph.edges)
+    assert len(edges) == 1
+    assert edges[0].to_node == NodeB
+
+    # AND the graph has both nodes
+    nodes = list(graph.nodes)
+    assert len(nodes) == 2
+    assert NodeA in nodes
+    assert NodeB in nodes
+
+
+def test_graph__manual_trigger_to_set_of_graphs_preserves_edges():
+    # GIVEN two graphs of nodes
+    class NodeA(BaseNode):
+        pass
+
+    class NodeB(BaseNode):
+        pass
+
+    class NodeC(BaseNode):
+        pass
+
+    class NodeD(BaseNode):
+        pass
+
+    graph_one = NodeA >> NodeB
+    graph_two = NodeC >> NodeD
+
+    # WHEN we create a graph with ManualTrigger >> {Graph1, Graph2}
+    combined_graph = ManualTrigger >> {graph_one, graph_two}
+
+    # THEN the combined graph has trigger edges to both entrypoints
+    trigger_edges = list(combined_graph.trigger_edges)
+    assert len(trigger_edges) == 2
+    assert {edge.to_node for edge in trigger_edges} == {NodeA, NodeC}
+
+    # AND the combined graph preserves all downstream edges
+    edges = list(combined_graph.edges)
+    assert len(edges) == 2
+    assert {(edge.from_port.node_class, edge.to_node) for edge in edges} == {
+        (NodeA, NodeB),
+        (NodeC, NodeD),
+    }
+
+    # AND the combined graph still exposes all nodes
+    nodes = list(combined_graph.nodes)
+    assert {NodeA, NodeB, NodeC, NodeD}.issubset(nodes)
+
+
+def test_graph__node_to_trigger_raises():
+    # GIVEN a node and trigger
+    class MyNode(BaseNode):
+        pass
+
+    # WHEN we try to create Node >> Trigger (class-level)
+    # THEN it raises TypeError
+    with pytest.raises(TypeError, match="Cannot create edge targeting trigger"):
+        MyNode >> ManualTrigger
+
+    # WHEN we try to create Node >> Trigger (instance-level)
+    # THEN it also raises TypeError
+    with pytest.raises(TypeError, match="Cannot create edge targeting trigger"):
+        MyNode >> ManualTrigger
+
+
+def test_graph__trigger_then_graph_then_node():
+    # GIVEN a trigger, a node, and another node
+    class StartNode(BaseNode):
+        pass
+
+    class EndNode(BaseNode):
+        pass
+
+    # WHEN we create Trigger >> Node >> Node
+    graph = ManualTrigger >> StartNode >> EndNode
+
+    # THEN the graph has one trigger edge
+    trigger_edges = list(graph.trigger_edges)
+    assert len(trigger_edges) == 1
+    assert trigger_edges[0].to_node == StartNode
+
+    # AND the graph has one regular edge
+    edges = list(graph.edges)
+    assert len(edges) == 1
+    assert edges[0].to_node == EndNode
+
+    # AND the graph has both nodes
+    nodes = list(graph.nodes)
+    assert len(nodes) == 2
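
Note: the new test_manual_trigger_serialization.py in the file list suggests trigger-prefixed graphs are also meant to be attached to workflows. A hedged sketch, assuming BaseWorkflow accepts such a graph on its graph attribute exactly as it accepts a node-only graph (the workflow and node names are illustrative):

    from vellum.workflows import BaseWorkflow
    from vellum.workflows.inputs.base import BaseInputs
    from vellum.workflows.nodes.bases.base import BaseNode
    from vellum.workflows.state.base import BaseState
    from vellum.workflows.triggers import ManualTrigger


    class StartNode(BaseNode):
        pass


    # Assumption: a trigger may lead the graph expression, as in the tests above.
    class MyWorkflow(BaseWorkflow[BaseInputs, BaseState]):
        graph = ManualTrigger >> StartNode
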
@@ -21,7 +21,7 @@ from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.errors.types import workflow_event_error_to_workflow_error
 from vellum.workflows.events.types import default_serializer
 from vellum.workflows.events.workflow import is_workflow_event
-from vellum.workflows.exceptions import NodeException
+from vellum.workflows.exceptions import NodeException, WorkflowInitializationException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases.base import BaseNode
 from vellum.workflows.outputs.base import BaseOutput
@@ -164,6 +164,16 @@ class SubworkflowDeploymentNode(BaseNode[StateType], Generic[StateType]):
             node_output_mocks=self._context._get_all_node_output_mocks(),
         )
 
+        try:
+            next(subworkflow_stream)
+        except WorkflowInitializationException as e:
+            hashed_module = e.definition.__module__
+            raise NodeException(
+                message=e.message,
+                code=e.code,
+                raw_data={"hashed_module": hashed_module},
+            ) from e
+
         outputs = None
         exception = None
         fulfilled_output_names: Set[str] = set()
@@ -47,7 +47,7 @@ class ToolCallingNode(BaseNode[StateType], Generic[StateType]):
     functions: ClassVar[List[Tool]] = []
     prompt_inputs: ClassVar[Optional[EntityInputsInterface]] = None
     parameters: PromptParameters = DEFAULT_PROMPT_PARAMETERS
-    max_prompt_iterations: ClassVar[Optional[int]] = 5
+    max_prompt_iterations: ClassVar[Optional[int]] = 25
     settings: ClassVar[Optional[Union[PromptSettings, Dict[str, Any]]]] = None
 
     class Outputs(BaseOutputs):
@@ -89,7 +89,7 @@ class FunctionCallNodeMixin:
 
 
 class ToolPromptNode(InlinePromptNode[ToolCallingState]):
-    max_prompt_iterations: Optional[int] = 5
+    max_prompt_iterations: Optional[int] = 25
 
     class Trigger(InlinePromptNode.Trigger):
         merge_behavior = MergeBehavior.AWAIT_ATTRIBUTES
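
Note: both hunks above raise the default tool-calling iteration cap from 5 to 25. Workflows that depended on the previous, tighter budget can pin it back by overriding the ClassVar; a minimal sketch (the subclass name is illustrative):

    from vellum.workflows.nodes.displayable.tool_calling_node.node import ToolCallingNode


    # Restore the 1.7.3 default of 5 prompt iterations for this node only.
    class ConservativeToolCallingNode(ToolCallingNode):
        max_prompt_iterations = 5
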
@@ -61,6 +61,17 @@ class Port:
         return iter(self._edges)
 
     def __rshift__(self, other: GraphTarget) -> Graph:
+        # Check for trigger target (class-level only)
+        from vellum.workflows.triggers.base import BaseTrigger
+
+        # Check if other is a trigger class
+        if isinstance(other, type) and issubclass(other, BaseTrigger):
+            raise TypeError(
+                f"Cannot create edge targeting trigger {other.__name__}. "
+                f"Triggers must be at the start of a graph path, not as targets. "
+                f"Did you mean: {other.__name__} >> {self.node_class.__name__}?"
+            )
+
         if isinstance(other, set) or isinstance(other, Graph):
             return Graph.from_port(self) >> other
 
@@ -1,4 +1,5 @@
 from collections import defaultdict
+from contextlib import contextmanager
 from copy import deepcopy
 from dataclasses import dataclass
 import logging
@@ -73,6 +74,7 @@ from vellum.workflows.references import ExternalInputReference, OutputReference
 from vellum.workflows.references.state_value import StateValueReference
 from vellum.workflows.state.base import BaseState
 from vellum.workflows.state.delta import StateDelta
+from vellum.workflows.types.core import CancelSignal
 from vellum.workflows.types.generics import InputsType, OutputsType, StateType
 
 if TYPE_CHECKING:
@@ -102,7 +104,7 @@ class WorkflowRunner(Generic[StateType]):
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         previous_execution_id: Optional[Union[str, UUID]] = None,
-        cancel_signal: Optional[ThreadingEvent] = None,
+        cancel_signal: Optional[CancelSignal] = None,
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
         init_execution_context: Optional[ExecutionContext] = None,
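
Note: the annotation change above swaps the bare threading.Event for the new CancelSignal alias from vellum.workflows.types.core; a plain threading.Event should still satisfy it. A hedged sketch, assuming BaseWorkflow.run() forwards cancel_signal to WorkflowRunner and reusing the hypothetical MyWorkflow from the earlier sketch:

    import threading

    cancel_signal = threading.Event()

    # Request cancellation after 30 seconds from a background timer thread;
    # the runner reacts by emitting a WORKFLOW_CANCELLED rejection (see the hunks below).
    timer = threading.Timer(30.0, cancel_signal.set)
    timer.start()

    final_event = MyWorkflow().run(cancel_signal=cancel_signal)
    timer.cancel()
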
@@ -232,7 +234,33 @@ class WorkflowRunner(Generic[StateType]):
 
         self._background_thread: Optional[Thread] = None
         self._cancel_thread: Optional[Thread] = None
-        self._stream_thread: Optional[Thread] = None
+
+    @contextmanager
+    def _httpx_logger_with_span_id(self) -> Iterator[None]:
+        """
+        Context manager to append the current execution's span ID to httpx logger messages.
+
+        This is used when making API requests via the Vellum client to include
+        the execution's span ID in the httpx request logs for better traceability.
+        """
+        httpx_logger = logging.getLogger("httpx")
+
+        class SpanIdFilter(logging.Filter):
+            def filter(self, record: logging.LogRecord) -> bool:
+                if record.name == "httpx" and "[span_id=" not in record.msg:
+                    context = get_execution_context()
+                    if context.parent_context:
+                        span_id = str(context.parent_context.span_id)
+                        record.msg = f"{record.msg} [span_id={span_id}]"
+                return True
+
+        span_filter = SpanIdFilter()
+        httpx_logger.addFilter(span_filter)
+
+        try:
+            yield
+        finally:
+            httpx_logger.removeFilter(span_filter)
 
     def _snapshot_state(self, state: StateType, deltas: List[StateDelta]) -> StateType:
         self._workflow_event_inner_queue.put(
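
Note: the SpanIdFilter above only decorates records the httpx logger actually emits, so the span ID suffix becomes visible once httpx logging is enabled. A small, standard-library-only illustration:

    import logging

    # httpx logs each request at INFO; while the runner's stream thread is active,
    # those messages gain a trailing " [span_id=<uuid>]" from the filter above.
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("httpx").setLevel(logging.INFO)
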
@@ -707,6 +735,7 @@ class WorkflowRunner(Generic[StateType]):
             error_message: The error message to include in the cancellation events
             parent_context: The parent context for the cancellation events
         """
+        captured_stacktrace = "".join(traceback.format_stack())
        for span_id, active_node in list(self._active_nodes_by_execution_id.items()):
             rejection_event = NodeExecutionRejectedEvent(
                 trace_id=self._execution_context.trace_id,
@@ -717,6 +746,7 @@ class WorkflowRunner(Generic[StateType]):
                     code=WorkflowErrorCode.NODE_CANCELLED,
                     message=error_message,
                 ),
+                stacktrace=captured_stacktrace,
             ),
             parent=parent_context,
         )
@@ -770,13 +800,14 @@ class WorkflowRunner(Generic[StateType]):
             parent=self._execution_context.parent_context,
         )
 
-    def _fulfill_workflow_event(self, outputs: OutputsType) -> WorkflowExecutionFulfilledEvent:
+    def _fulfill_workflow_event(self, outputs: OutputsType, final_state: StateType) -> WorkflowExecutionFulfilledEvent:
         return WorkflowExecutionFulfilledEvent(
             trace_id=self._execution_context.trace_id,
             span_id=self._initial_state.meta.span_id,
             body=WorkflowExecutionFulfilledBody(
                 workflow_definition=self.workflow.__class__,
                 outputs=outputs,
+                final_state=final_state,
             ),
             parent=self._execution_context.parent_context,
         )
@@ -824,6 +855,11 @@ class WorkflowRunner(Generic[StateType]):
             parent=self._execution_context.parent_context,
         )
 
+    def _wrapped_stream(self) -> None:
+        """Wrapper for _stream that adds httpx logger span ID context."""
+        with self._httpx_logger_with_span_id():
+            self._stream()
+
     def _stream(self) -> None:
         for edge in self.workflow.get_edges():
             self._dependencies[edge.to_node].add(edge.from_port.node_class)
@@ -927,7 +963,7 @@ class WorkflowRunner(Generic[StateType]):
                 descriptor.instance.resolve(final_state),
             )
 
-        self._workflow_event_outer_queue.put(self._fulfill_workflow_event(fulfilled_outputs))
+        self._workflow_event_outer_queue.put(self._fulfill_workflow_event(fulfilled_outputs, final_state))
 
     def _run_background_thread(self) -> None:
         state_class = self.workflow.get_state_class()
@@ -960,12 +996,14 @@ class WorkflowRunner(Generic[StateType]):
                 parent_context=parent_context,
             )
 
+            captured_stacktrace = "".join(traceback.format_stack())
             self._workflow_event_outer_queue.put(
                 self._reject_workflow_event(
                     WorkflowError(
                         code=WorkflowErrorCode.WORKFLOW_CANCELLED,
                         message="Workflow run cancelled",
-                    )
+                    ),
+                    captured_stacktrace,
                 )
             )
             return
@@ -1005,7 +1043,7 @@ class WorkflowRunner(Generic[StateType]):
 
         # The extra level of indirection prevents the runner from waiting on the caller to consume the event stream
         self._stream_thread = Thread(
-            target=self._stream,
+            target=self._wrapped_stream,
             name=f"{self.workflow.__class__.__name__}.stream_thread",
         )
         self._stream_thread.start()
@@ -0,0 +1,4 @@
+from vellum.workflows.triggers.base import BaseTrigger
+from vellum.workflows.triggers.manual import ManualTrigger
+
+__all__ = ["BaseTrigger", "ManualTrigger"]
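
Note: ManualTrigger is the only concrete trigger shipped here, but the exported BaseTrigger is what provides the >> behavior. A hedged sketch of a custom trigger, assuming that subclassing BaseTrigger with no extra configuration is enough to participate in graph syntax (ManualTrigger itself appears to be a thin subclass):

    from vellum.workflows.nodes.bases.base import BaseNode
    from vellum.workflows.triggers import BaseTrigger


    # Assumption: no abstract members need overriding for graph syntax to work.
    class NightlyTrigger(BaseTrigger):
        pass


    class RefreshNode(BaseNode):
        pass


    graph = NightlyTrigger >> RefreshNode
    assert all(edge.trigger_class is NightlyTrigger for edge in graph.trigger_edges)
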