vellum-ai 1.8.2__py3-none-any.whl → 1.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. vellum/__init__.py +4 -0
  2. vellum/client/core/client_wrapper.py +2 -2
  3. vellum/client/types/__init__.py +4 -0
  4. vellum/client/types/integration_name.py +1 -0
  5. vellum/client/types/workflow_deployment_display_data.py +27 -0
  6. vellum/client/types/workflow_deployment_read.py +6 -0
  7. vellum/client/types/workflow_display_icon.py +24 -0
  8. vellum/types/workflow_deployment_display_data.py +3 -0
  9. vellum/types/workflow_display_icon.py +3 -0
  10. vellum/workflows/events/types.py +8 -0
  11. vellum/workflows/expressions/concat.py +6 -3
  12. vellum/workflows/expressions/tests/test_concat.py +63 -8
  13. vellum/workflows/nodes/core/map_node/node.py +1 -0
  14. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +20 -5
  15. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +11 -7
  16. vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py +42 -0
  17. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +84 -56
  18. vellum/workflows/nodes/experimental/__init__.py +1 -3
  19. vellum/workflows/runner/runner.py +144 -0
  20. vellum/workflows/state/context.py +75 -8
  21. vellum/workflows/workflows/base.py +20 -1
  22. vellum/workflows/workflows/event_filters.py +13 -0
  23. vellum/workflows/workflows/tests/test_event_filters.py +126 -0
  24. {vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/METADATA +1 -1
  25. {vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/RECORD +28 -25
  26. vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py +0 -5
  27. vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py +0 -266
  28. {vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/LICENSE +0 -0
  29. {vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/WHEEL +0 -0
  30. {vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/entry_points.txt +0 -0
vellum/workflows/runner/runner.py

@@ -74,6 +74,9 @@ from vellum.workflows.references import ExternalInputReference, OutputReference
 from vellum.workflows.references.state_value import StateValueReference
 from vellum.workflows.state.base import BaseState
 from vellum.workflows.state.delta import StateDelta
+from vellum.workflows.triggers.base import BaseTrigger
+from vellum.workflows.triggers.integration import IntegrationTrigger
+from vellum.workflows.triggers.manual import ManualTrigger
 from vellum.workflows.types.core import CancelSignal
 from vellum.workflows.types.generics import InputsType, OutputsType, StateType

@@ -109,6 +112,7 @@ class WorkflowRunner(Generic[StateType]):
         max_concurrency: Optional[int] = None,
         timeout: Optional[float] = None,
         init_execution_context: Optional[ExecutionContext] = None,
+        trigger: Optional[BaseTrigger] = None,
     ):
         if state and external_inputs:
             raise ValueError("Can only run a Workflow providing one of state or external inputs, not both")
@@ -198,7 +202,24 @@
             )

             self._entrypoints = self.workflow.get_entrypoints()
+        elif trigger:
+            # When trigger is provided, set up default state and filter entrypoints by trigger type
+            normalized_inputs = deepcopy(inputs) if inputs else self.workflow.get_default_inputs()
+            if state:
+                self._initial_state = deepcopy(state)
+                self._initial_state.meta.workflow_inputs = normalized_inputs
+                self._initial_state.meta.span_id = uuid4()
+                self._initial_state.meta.workflow_definition = self.workflow.__class__
+            else:
+                self._initial_state = self.workflow.get_default_state(normalized_inputs)
+                self._should_emit_initial_state = False
+
+            # Validate and bind trigger, then filter entrypoints
+            self._validate_and_bind_trigger(trigger)
+            self._entrypoints = self.workflow.get_entrypoints()
+            self._filter_entrypoints_for_trigger(trigger)
         else:
+            # Default case: no entrypoint overrides and no trigger
             normalized_inputs = deepcopy(inputs) if inputs else self.workflow.get_default_inputs()
             if state:
                 self._initial_state = deepcopy(state)
@@ -213,6 +234,9 @@
                 self._should_emit_initial_state = False
             self._entrypoints = self.workflow.get_entrypoints()

+        # Check if workflow requires a trigger but none was provided
+        self._validate_no_trigger_provided()
+
         # This queue is responsible for sending events from WorkflowRunner to the outside world
         self._workflow_event_outer_queue: Queue[WorkflowEvent] = Queue()

@@ -250,6 +274,126 @@
         self._cancel_thread: Optional[Thread] = None
         self._timeout_thread: Optional[Thread] = None

+    def _has_manual_trigger(self) -> bool:
+        """Check if workflow has ManualTrigger."""
+        for subgraph in self.workflow.get_subgraphs():
+            for trigger in subgraph.triggers:
+                if issubclass(trigger, ManualTrigger):
+                    return True
+        return False
+
+    def _get_entrypoints_for_trigger_type(self, trigger_class: Type) -> List[Type[BaseNode]]:
+        """Get all entrypoints connected to a specific trigger type.
+
+        Allows subclasses: if trigger_class is a subclass of any declared trigger,
+        returns those entrypoints.
+        """
+        entrypoints: List[Type[BaseNode]] = []
+        for subgraph in self.workflow.get_subgraphs():
+            for trigger in subgraph.triggers:
+                # Check if the provided trigger_class is a subclass of the declared trigger
+                # This allows runtime instances to be subclasses of what's declared in the workflow
+                if issubclass(trigger_class, trigger):
+                    entrypoints.extend(subgraph.entrypoints)
+        return entrypoints
+
+    def _validate_and_bind_trigger(self, trigger: BaseTrigger) -> None:
+        """
+        Validate that trigger is compatible with workflow and bind it to state.
+
+        Supports all trigger types derived from BaseTrigger:
+        - IntegrationTrigger instances (Slack, Gmail, etc.)
+        - ManualTrigger instances (explicit manual execution)
+        - ScheduledTrigger instances (time-based triggers)
+        - Any future trigger types
+
+        Raises:
+            WorkflowInitializationException: If trigger type is not compatible with workflow
+        """
+        trigger_class = type(trigger)
+
+        # Search for a compatible trigger type in the workflow
+        found_compatible_trigger = False
+        has_any_triggers = False
+        incompatible_trigger_names: List[str] = []
+
+        for subgraph in self.workflow.get_subgraphs():
+            for declared_trigger in subgraph.triggers:
+                has_any_triggers = True
+                # Allow subclasses: if workflow declares BaseSlackTrigger, accept SpecificSlackTrigger instances
+                if issubclass(trigger_class, declared_trigger):
+                    found_compatible_trigger = True
+                    break
+                else:
+                    incompatible_trigger_names.append(declared_trigger.__name__)
+
+            if found_compatible_trigger:
+                break
+
+        # Special case: workflows with no explicit triggers implicitly support ManualTrigger
+        if not has_any_triggers and not isinstance(trigger, ManualTrigger):
+            raise WorkflowInitializationException(
+                message=f"Provided trigger type {trigger_class.__name__} is not compatible with workflow. "
+                f"Workflow has no explicit triggers and only supports ManualTrigger.",
+                workflow_definition=self.workflow.__class__,
+                code=WorkflowErrorCode.INVALID_INPUTS,
+            )
+
+        # Validate that we found a compatible trigger type
+        if has_any_triggers and not found_compatible_trigger:
+            raise WorkflowInitializationException(
+                message=f"Provided trigger type {trigger_class.__name__} is not compatible with workflow triggers. "
+                f"Workflow has: {sorted(set(incompatible_trigger_names))}",
+                workflow_definition=self.workflow.__class__,
+                code=WorkflowErrorCode.INVALID_INPUTS,
+            )
+
+        # Bind trigger to state (works for all trigger types via BaseTrigger.bind_to_state)
+        trigger.bind_to_state(self._initial_state)
+
+    def _filter_entrypoints_for_trigger(self, trigger: BaseTrigger) -> None:
+        """
+        Filter entrypoints to those connected to the specific trigger type.
+
+        Uses the specific trigger subclass, not the parent class, allowing workflows
+        with multiple triggers to route to the correct path.
+        """
+        trigger_class = type(trigger)
+        specific_entrypoints = self._get_entrypoints_for_trigger_type(trigger_class)
+        if specific_entrypoints:
+            self._entrypoints = specific_entrypoints
+
+    def _validate_no_trigger_provided(self) -> None:
+        """
+        Validate that workflow can run without a trigger.
+
+        If workflow has IntegrationTrigger(s) but no ManualTrigger, it requires a trigger instance.
+        If workflow has both, filter entrypoints to ManualTrigger path only.
+
+        Raises:
+            WorkflowInitializationException: If workflow requires trigger but none was provided
+        """
+        # Collect all IntegrationTrigger types in the workflow
+        workflow_integration_triggers = []
+        for subgraph in self.workflow.get_subgraphs():
+            for trigger_type in subgraph.triggers:
+                if issubclass(trigger_type, IntegrationTrigger):
+                    workflow_integration_triggers.append(trigger_type)
+
+        if workflow_integration_triggers:
+            if not self._has_manual_trigger():
+                # Workflow has ONLY IntegrationTrigger - this is an error
+                raise WorkflowInitializationException(
+                    message="Workflow has IntegrationTrigger which requires trigger parameter",
+                    workflow_definition=self.workflow.__class__,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
+                )
+
+            # Workflow has both IntegrationTrigger and ManualTrigger - filter to ManualTrigger path
+            manual_entrypoints = self._get_entrypoints_for_trigger_type(ManualTrigger)
+            if manual_entrypoints:
+                self._entrypoints = manual_entrypoints
+
     @contextmanager
     def _httpx_logger_with_span_id(self) -> Iterator[None]:
         """
vellum/workflows/state/context.py

@@ -1,7 +1,8 @@
+from dataclasses import dataclass
 from functools import cached_property
 from queue import Queue
 from uuid import UUID, uuid4
-from typing import TYPE_CHECKING, Dict, List, Optional, Type
+from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Type

 from vellum import Vellum, __version__
 from vellum.workflows.context import ExecutionContext, get_execution_context, set_execution_context
@@ -9,6 +10,7 @@ from vellum.workflows.events.types import ExternalParentContext
 from vellum.workflows.nodes.mocks import MockNodeExecution, MockNodeExecutionArg
 from vellum.workflows.outputs.base import BaseOutputs
 from vellum.workflows.references.constant import ConstantValueReference
+from vellum.workflows.state.store import Store
 from vellum.workflows.utils.uuids import generate_workflow_deployment_prefix
 from vellum.workflows.utils.zip import extract_zip_files
 from vellum.workflows.vellum_client import create_vellum_client
@@ -19,6 +21,18 @@ if TYPE_CHECKING:
     from vellum.workflows.workflows.base import BaseWorkflow


+@dataclass
+class WorkflowDeploymentMetadata:
+    """Metadata about a workflow deployment needed for parent context construction."""
+
+    deployment_id: UUID
+    deployment_name: str
+    deployment_history_item_id: UUID
+    release_tag_id: UUID
+    release_tag_name: str
+    workflow_version_id: UUID
+
+
 class WorkflowContext:
     def __init__(
         self,
@@ -27,12 +41,14 @@ class WorkflowContext:
         execution_context: Optional[ExecutionContext] = None,
         generated_files: Optional[dict[str, str]] = None,
         namespace: Optional[str] = None,
+        store_class: Optional[Type[Store]] = None,
     ):
         self._vellum_client = vellum_client
         self._event_queue: Optional[Queue["WorkflowEvent"]] = None
         self._node_output_mocks_map: Dict[Type[BaseOutputs], List[MockNodeExecution]] = {}
         self._execution_context = get_execution_context()
         self._namespace = namespace
+        self._store_class = store_class if store_class is not None else Store

         if execution_context is not None:
             self._execution_context.trace_id = execution_context.trace_id
@@ -72,6 +88,10 @@
     def node_output_mocks_map(self) -> Dict[Type[BaseOutputs], List[MockNodeExecution]]:
         return self._node_output_mocks_map

+    @property
+    def store_class(self) -> Type[Store]:
+        return self._store_class
+
     @property
     def monitoring_url(self) -> Optional[str]:
         """
@@ -148,7 +168,7 @@

     def resolve_workflow_deployment(
         self, deployment_name: str, release_tag: str, state: "BaseState"
-    ) -> Optional["BaseWorkflow"]:
+    ) -> Optional[Tuple[Type["BaseWorkflow"], Optional[WorkflowDeploymentMetadata]]]:
         """
         Resolve a workflow deployment by name and release tag.

@@ -158,20 +178,22 @@
             state: The base state to pass to the workflow

        Returns:
-            BaseWorkflow instance if found, None otherwise
+            Tuple of (BaseWorkflow class, deployment metadata) if found
        """
        if not self._generated_files or not self._namespace:
            return None

        expected_prefix = generate_workflow_deployment_prefix(deployment_name, release_tag)

+        deployment_metadata = self._fetch_deployment_metadata(deployment_name, release_tag)
+
        try:
            from vellum.workflows.workflows.base import BaseWorkflow

            WorkflowClass = BaseWorkflow.load_from_module(f"{self.namespace}.{expected_prefix}")
            WorkflowClass.is_dynamic = True
-            workflow_instance = WorkflowClass(context=WorkflowContext.create_from(self), parent_state=state)
-            return workflow_instance
+            # Return the class, not an instance, so caller can instantiate within proper execution context
+            return (WorkflowClass, deployment_metadata)
        except Exception:
            pass

@@ -200,16 +222,61 @@

            WorkflowClass = BaseWorkflow.load_from_module(f"{self.namespace}.{expected_prefix}")
            WorkflowClass.is_dynamic = True
-            workflow_instance = WorkflowClass(context=WorkflowContext.create_from(self), parent_state=state)
-            return workflow_instance
+            # Return the class, not an instance, so caller can instantiate within proper execution context
+            return (WorkflowClass, deployment_metadata)

        except Exception:
            pass

        return None

+    def _fetch_deployment_metadata(
+        self, deployment_name: str, release_tag: str
+    ) -> Optional[WorkflowDeploymentMetadata]:
+        """
+        Fetch deployment metadata from the Vellum API.
+
+        Args:
+            deployment_name: The name of the workflow deployment
+            release_tag: The release tag name
+
+        Returns:
+            WorkflowDeploymentMetadata if successful, None otherwise
+        """
+        try:
+            # Fetch deployment details
+            deployment = self.vellum_client.workflow_deployments.retrieve(deployment_name)
+
+            deployment_id = UUID(deployment.id)
+
+            # Fetch release tag details
+            release_tag_info = self.vellum_client.workflow_deployments.retrieve_workflow_release_tag(
+                deployment.id, release_tag
+            )
+
+            # Fetch workflow version
+            release = self.vellum_client.workflow_deployments.retrieve_workflow_deployment_release(
+                str(deployment_id), release_tag
+            )
+
+            return WorkflowDeploymentMetadata(
+                deployment_id=deployment_id,
+                deployment_name=deployment.name,
+                deployment_history_item_id=UUID(deployment.last_deployed_history_item_id),
+                release_tag_id=UUID(release_tag_info.release.id),
+                release_tag_name=release_tag_info.name,
+                workflow_version_id=UUID(release.workflow_version.id),
+            )
+        except Exception:
+            # If we fail to fetch metadata, return None - the workflow can still run
+            # but won't have the full parent context hierarchy
+            return None
+
     @classmethod
     def create_from(cls, context):
         return cls(
-            vellum_client=context.vellum_client, generated_files=context.generated_files, namespace=context.namespace
+            vellum_client=context.vellum_client,
+            generated_files=context.generated_files,
+            namespace=context.namespace,
+            store_class=context.store_class,
         )
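Because resolve_workflow_deployment now returns the workflow class plus optional deployment metadata instead of an instance, callers unpack the tuple and instantiate the class themselves inside their own execution context. A hedged sketch of a call site, assuming a WorkflowContext named context and a BaseState named state are already in scope (the deployment name is a placeholder; the real consumer is subworkflow_deployment_node/node.py, which is not shown in this section):

    resolved = context.resolve_workflow_deployment(
        deployment_name="example-deployment",  # placeholder name
        release_tag="LATEST",
        state=state,
    )
    if resolved is not None:
        WorkflowClass, deployment_metadata = resolved
        # deployment_metadata may be None if the API lookups in _fetch_deployment_metadata failed;
        # instantiating here keeps the subworkflow inside the caller's execution context.
        subworkflow = WorkflowClass(context=WorkflowContext.create_from(context), parent_state=state)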
vellum/workflows/workflows/base.py

@@ -79,6 +79,7 @@ from vellum.workflows.runner.runner import ExternalInputsArg, RunFromNodeArg
 from vellum.workflows.state.base import BaseState, StateMeta
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.state.store import Store
+from vellum.workflows.triggers.base import BaseTrigger
 from vellum.workflows.types import CancelSignal
 from vellum.workflows.types.generics import InputsType, StateType
 from vellum.workflows.types.utils import get_original_base
@@ -256,7 +257,9 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         self._context = context or WorkflowContext()
         self.emitters = emitters or (self.emitters if hasattr(self, "emitters") else [])
         self.resolvers = resolvers or (self.resolvers if hasattr(self, "resolvers") else [])
-        self._store = store or Store()
+        # Prioritize store type from WorkflowContext to allow subworkflows to inherit EmptyStore
+        # TODO(v2.0.0): Remove the concept of an internal store altogether (important-comment)
+        self._store = store or self._context.store_class()
         self._execution_context = self._context.execution_context
         self._current_runner: Optional[WorkflowRunner] = None
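A hedged sketch of the new store hook: BaseWorkflow now falls back to self._context.store_class(), and WorkflowContext.create_from propagates the class to child contexts, so subworkflows inherit it. NoopStore and MyWorkflow below are hypothetical names used only for illustration:

    from vellum.workflows.state.context import WorkflowContext
    from vellum.workflows.state.store import Store

    class NoopStore(Store):  # hypothetical Store subclass
        pass

    context = WorkflowContext(store_class=NoopStore)
    workflow = MyWorkflow(context=context)  # MyWorkflow: a hypothetical BaseWorkflow subclass
    # Unless an explicit store= is passed, this workflow (and subworkflows created from contexts
    # built via WorkflowContext.create_from) will construct NoopStore instead of the default Store.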
 
@@ -382,6 +385,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
         timeout: Optional[float] = None,
+        trigger: Optional[BaseTrigger] = None,
     ) -> TerminalWorkflowEvent:
         """
         Invoke a Workflow, returning the last event emitted, which should be one of:
@@ -422,6 +426,12 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
             timeout: Optional[float] = None
                 The maximum time in seconds to allow the Workflow to run. If the timeout is exceeded, the Workflow
                 will be rejected with a WORKFLOW_TIMEOUT error code and any nodes in flight will be rejected.
+
+            trigger: Optional[BaseTrigger] = None
+                A trigger instance for workflows with triggers (e.g., IntegrationTrigger, ManualTrigger, ScheduledTrigger).
+                The trigger instance is bound to the workflow state, making its attributes accessible to downstream nodes.
+                Required for workflows that only have IntegrationTrigger; optional for workflows with both ManualTrigger
+                and IntegrationTrigger.
         """

         runner = WorkflowRunner(
@@ -436,6 +446,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
             max_concurrency=max_concurrency,
             timeout=timeout,
             init_execution_context=self._execution_context,
+            trigger=trigger,
         )
         self._current_runner = runner
         events = runner.stream()
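A hedged usage sketch of the new trigger parameter, assuming a BaseWorkflow subclass named MyWorkflow and a no-argument ManualTrigger constructor (neither is shown in this diff):

    from vellum.workflows.triggers.manual import ManualTrigger

    workflow = MyWorkflow()
    final_event = workflow.run(trigger=ManualTrigger())  # stream() accepts the same keyword
    # A workflow that declares only an IntegrationTrigger must be given a trigger instance;
    # otherwise the runner raises WorkflowInitializationException with code INVALID_INPUTS.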
@@ -507,6 +518,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
         timeout: Optional[float] = None,
+        trigger: Optional[BaseTrigger] = None,
     ) -> WorkflowEventStream:
         """
         Invoke a Workflow, yielding events as they are emitted.
@@ -548,6 +560,12 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
             timeout: Optional[float] = None
                 The maximum time in seconds to allow the Workflow to run. If the timeout is exceeded, the Workflow
                 will be rejected with a WORKFLOW_TIMEOUT error code and any nodes in flight will be rejected.
+
+            trigger: Optional[BaseTrigger] = None
+                A trigger instance for workflows with triggers (e.g., IntegrationTrigger, ManualTrigger, ScheduledTrigger).
+                The trigger instance is bound to the workflow state, making its attributes accessible to downstream nodes.
+                Required for workflows that only have IntegrationTrigger; optional for workflows with both ManualTrigger
+                and IntegrationTrigger.
         """

         should_yield = event_filter or workflow_event_filter
@@ -563,6 +581,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
             max_concurrency=max_concurrency,
             timeout=timeout,
             init_execution_context=self._execution_context,
+            trigger=trigger,
         )
         self._current_runner = runner
         runner_stream = runner.stream()
vellum/workflows/workflows/event_filters.py

@@ -52,5 +52,18 @@ def root_workflow_event_filter(workflow_definition: Type["BaseWorkflow"], event:
     return event_parent_definition.model_dump() == current_workflow_definition.model_dump()


+def workflow_sandbox_event_filter(workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent") -> bool:
+    """
+    Filter designed for Workflow Sandbox interfaces: include all events except
+    workflow.execution.snapshotted events from nested/subworkflows. Only allow
+    snapshotted events when they belong to the root workflow definition.
+    """
+
+    if event.name == "workflow.execution.snapshotted":
+        return event.workflow_definition == workflow_definition
+
+    return True
+
+
 def all_workflow_event_filter(workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent") -> bool:
     return True
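Usage mirrors the existing filters: pass it as the event_filter when streaming (workflow being any BaseWorkflow instance); the new tests below exercise exactly this path:

    from vellum.workflows.workflows.event_filters import workflow_sandbox_event_filter

    events = list(workflow.stream(event_filter=workflow_sandbox_event_filter))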
vellum/workflows/workflows/tests/test_event_filters.py (new file)

@@ -0,0 +1,126 @@
+from vellum.workflows.inputs import BaseInputs
+from vellum.workflows.nodes import BaseNode, InlineSubworkflowNode
+from vellum.workflows.outputs.base import BaseOutputs
+from vellum.workflows.state.base import BaseState
+from vellum.workflows.workflows.base import BaseWorkflow
+from vellum.workflows.workflows.event_filters import workflow_sandbox_event_filter
+
+
+class NestedInputs(BaseInputs):
+    value: str
+
+
+class NestedNode(BaseNode):
+    value = NestedInputs.value
+
+    class Outputs(BaseOutputs):
+        result: str
+
+    def run(self) -> Outputs:
+        return self.Outputs(result=f"nested: {self.value}")
+
+
+class NestedWorkflow(BaseWorkflow[NestedInputs, BaseState]):
+    graph = NestedNode
+
+    class Outputs(BaseOutputs):
+        result = NestedNode.Outputs.result
+
+
+class ParentInputs(BaseInputs):
+    value: str
+
+
+class SubworkflowNode(InlineSubworkflowNode):
+    subworkflow_inputs = {
+        "value": ParentInputs.value,
+    }
+    subworkflow = NestedWorkflow
+
+
+class ParentWorkflow(BaseWorkflow[ParentInputs, BaseState]):
+    graph = SubworkflowNode
+
+    class Outputs(BaseOutputs):
+        result = SubworkflowNode.Outputs.result
+
+
+def test_workflow_sandbox_event_filter__filters_nested_workflow_snapshotted_events():
+    """
+    Tests that workflow_sandbox_event_filter filters out snapshotted events from nested workflows.
+    """
+
+    workflow = ParentWorkflow()
+
+    # WHEN we stream the workflow with workflow_sandbox_event_filter
+    events = list(
+        workflow.stream(
+            inputs=ParentInputs(value="test"),
+            event_filter=workflow_sandbox_event_filter,
+        )
+    )
+
+    snapshotted_events = [e for e in events if e.name == "workflow.execution.snapshotted"]
+    assert len(snapshotted_events) > 0
+
+    for event in snapshotted_events:
+        assert event.workflow_definition == ParentWorkflow
+
+
+def test_workflow_sandbox_event_filter__includes_root_workflow_snapshotted_events():
+    """
+    Tests that workflow_sandbox_event_filter includes snapshotted events from the root workflow.
+    """
+
+    class SimpleNode(BaseNode):
+        class Outputs(BaseOutputs):
+            result: str = "simple"
+
+        def run(self) -> Outputs:
+            return self.Outputs()
+
+    class SimpleWorkflow(BaseWorkflow[BaseInputs, BaseState]):
+        graph = SimpleNode
+
+        class Outputs(BaseOutputs):
+            result = SimpleNode.Outputs.result
+
+    workflow = SimpleWorkflow()
+
+    # WHEN we stream the workflow with workflow_sandbox_event_filter
+    events = list(
+        workflow.stream(
+            inputs=BaseInputs(),
+            event_filter=workflow_sandbox_event_filter,
+        )
+    )
+
+    snapshotted_events = [e for e in events if e.name == "workflow.execution.snapshotted"]
+    assert len(snapshotted_events) > 0
+
+    for event in snapshotted_events:
+        assert event.workflow_definition == SimpleWorkflow
+
+
+def test_workflow_sandbox_event_filter__includes_nested_workflow_non_snapshotted_events():
+    """
+    Tests that workflow_sandbox_event_filter includes non-snapshotted events from nested workflows.
+    """
+
+    workflow = ParentWorkflow()
+
+    # WHEN we stream the workflow with workflow_sandbox_event_filter
+    events = list(
+        workflow.stream(
+            inputs=ParentInputs(value="test"),
+            event_filter=workflow_sandbox_event_filter,
+        )
+    )
+
+    nested_workflow_events = [
+        e for e in events if hasattr(e, "workflow_definition") and e.workflow_definition == NestedWorkflow
+    ]
+    assert len(nested_workflow_events) > 0
+
+    for event in nested_workflow_events:
+        assert event.name != "workflow.execution.snapshotted"
{vellum_ai-1.8.2.dist-info → vellum_ai-1.8.4.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 1.8.2
+Version: 1.8.4
 Summary:
 License: MIT
 Requires-Python: >=3.9,<4.0