vellum-ai 0.10.6__py3-none-any.whl → 0.10.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. vellum/__init__.py +2 -0
  2. vellum/client/README.md +7 -52
  3. vellum/client/__init__.py +16 -136
  4. vellum/client/core/client_wrapper.py +1 -1
  5. vellum/client/resources/ad_hoc/client.py +14 -104
  6. vellum/client/resources/metric_definitions/client.py +113 -0
  7. vellum/client/resources/test_suites/client.py +8 -16
  8. vellum/client/resources/workflows/client.py +0 -32
  9. vellum/client/types/__init__.py +2 -0
  10. vellum/client/types/metric_definition_history_item.py +39 -0
  11. vellum/types/metric_definition_history_item.py +3 -0
  12. vellum/workflows/events/node.py +36 -3
  13. vellum/workflows/events/tests/test_event.py +89 -9
  14. vellum/workflows/nodes/displayable/conditional_node/node.py +2 -2
  15. vellum/workflows/ports/node_ports.py +2 -2
  16. vellum/workflows/ports/port.py +14 -0
  17. vellum/workflows/references/__init__.py +2 -0
  18. vellum/workflows/runner/runner.py +6 -7
  19. vellum/workflows/runner/types.py +1 -3
  20. vellum/workflows/state/encoder.py +2 -1
  21. vellum/workflows/types/tests/test_utils.py +6 -3
  22. vellum/workflows/types/utils.py +3 -0
  23. {vellum_ai-0.10.6.dist-info → vellum_ai-0.10.7.dist-info}/METADATA +1 -1
  24. {vellum_ai-0.10.6.dist-info → vellum_ai-0.10.7.dist-info}/RECORD +32 -30
  25. vellum_ee/workflows/display/nodes/vellum/final_output_node.py +4 -2
  26. vellum_ee/workflows/display/nodes/vellum/map_node.py +20 -48
  27. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +5 -16
  28. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py +11 -8
  29. vellum_ee/workflows/display/utils/vellum.py +3 -2
  30. {vellum_ai-0.10.6.dist-info → vellum_ai-0.10.7.dist-info}/LICENSE +0 -0
  31. {vellum_ai-0.10.6.dist-info → vellum_ai-0.10.7.dist-info}/WHEEL +0 -0
  32. {vellum_ai-0.10.6.dist-info → vellum_ai-0.10.7.dist-info}/entry_points.txt +0 -0
@@ -232,25 +232,21 @@ class TestSuitesClient:
232
232
  api_key="YOUR_API_KEY",
233
233
  )
234
234
  response = client.test_suites.test_suite_test_cases_bulk(
235
- id="string",
235
+ id="id",
236
236
  request=[
237
237
  TestSuiteTestCaseCreateBulkOperationRequest(
238
- id="string",
238
+ id="id",
239
239
  data=CreateTestSuiteTestCaseRequest(
240
- label="string",
241
240
  input_values=[
242
241
  NamedTestCaseStringVariableValueRequest(
243
- value="string",
244
- name="string",
242
+ name="name",
245
243
  )
246
244
  ],
247
245
  evaluation_values=[
248
246
  NamedTestCaseStringVariableValueRequest(
249
- value="string",
250
- name="string",
247
+ name="name",
251
248
  )
252
249
  ],
253
- external_id="string",
254
250
  ),
255
251
  )
256
252
  ],
@@ -571,25 +567,21 @@ class AsyncTestSuitesClient:
571
567
 
572
568
  async def main() -> None:
573
569
  response = await client.test_suites.test_suite_test_cases_bulk(
574
- id="string",
570
+ id="id",
575
571
  request=[
576
572
  TestSuiteTestCaseCreateBulkOperationRequest(
577
- id="string",
573
+ id="id",
578
574
  data=CreateTestSuiteTestCaseRequest(
579
- label="string",
580
575
  input_values=[
581
576
  NamedTestCaseStringVariableValueRequest(
582
- value="string",
583
- name="string",
577
+ name="name",
584
578
  )
585
579
  ],
586
580
  evaluation_values=[
587
581
  NamedTestCaseStringVariableValueRequest(
588
- value="string",
589
- name="string",
582
+ name="name",
590
583
  )
591
584
  ],
592
- external_id="string",
593
585
  ),
594
586
  )
595
587
  ],
@@ -47,18 +47,6 @@ class WorkflowsClient:
47
47
  ------
48
48
  typing.Iterator[bytes]
49
49
 
50
-
51
- Examples
52
- --------
53
- from vellum import Vellum
54
-
55
- client = Vellum(
56
- api_key="YOUR_API_KEY",
57
- )
58
- client.workflows.pull(
59
- id="string",
60
- format="json",
61
- )
62
50
  """
63
51
  with self._client_wrapper.httpx_client.stream(
64
52
  f"v1/workflows/{jsonable_encoder(id)}/pull",
@@ -196,26 +184,6 @@ class AsyncWorkflowsClient:
196
184
  ------
197
185
  typing.AsyncIterator[bytes]
198
186
 
199
-
200
- Examples
201
- --------
202
- import asyncio
203
-
204
- from vellum import AsyncVellum
205
-
206
- client = AsyncVellum(
207
- api_key="YOUR_API_KEY",
208
- )
209
-
210
-
211
- async def main() -> None:
212
- await client.workflows.pull(
213
- id="string",
214
- format="json",
215
- )
216
-
217
-
218
- asyncio.run(main())
219
187
  """
220
188
  async with self._client_wrapper.httpx_client.stream(
221
189
  f"v1/workflows/{jsonable_encoder(id)}/pull",
@@ -197,6 +197,7 @@ from .metadata_filter_rule_combinator import MetadataFilterRuleCombinator
197
197
  from .metadata_filter_rule_request import MetadataFilterRuleRequest
198
198
  from .metadata_filters_request import MetadataFiltersRequest
199
199
  from .metric_definition_execution import MetricDefinitionExecution
200
+ from .metric_definition_history_item import MetricDefinitionHistoryItem
200
201
  from .metric_definition_input import MetricDefinitionInput
201
202
  from .metric_node_result import MetricNodeResult
202
203
  from .ml_model_read import MlModelRead
@@ -685,6 +686,7 @@ __all__ = [
685
686
  "MetadataFilterRuleRequest",
686
687
  "MetadataFiltersRequest",
687
688
  "MetricDefinitionExecution",
689
+ "MetricDefinitionHistoryItem",
688
690
  "MetricDefinitionInput",
689
691
  "MetricNodeResult",
690
692
  "MlModelRead",
@@ -0,0 +1,39 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from __future__ import annotations
4
+ from ..core.pydantic_utilities import UniversalBaseModel
5
+ from .array_vellum_value import ArrayVellumValue
6
+ import pydantic
7
+ import typing
8
+ from .vellum_variable import VellumVariable
9
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
10
+ from ..core.pydantic_utilities import update_forward_refs
11
+
12
+
13
+ class MetricDefinitionHistoryItem(UniversalBaseModel):
14
+ id: str
15
+ label: str = pydantic.Field()
16
+ """
17
+ A human-readable label for the metric
18
+ """
19
+
20
+ name: str = pydantic.Field()
21
+ """
22
+ A name that uniquely identifies this metric within its workspace
23
+ """
24
+
25
+ description: str
26
+ input_variables: typing.List[VellumVariable]
27
+ output_variables: typing.List[VellumVariable]
28
+
29
+ if IS_PYDANTIC_V2:
30
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
31
+ else:
32
+
33
+ class Config:
34
+ frozen = True
35
+ smart_union = True
36
+ extra = pydantic.Extra.allow
37
+
38
+
39
+ update_forward_refs(ArrayVellumValue, MetricDefinitionHistoryItem=MetricDefinitionHistoryItem)
@@ -0,0 +1,3 @@
1
+ # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
2
+
3
+ from vellum.client.types.metric_definition_history_item import *
@@ -1,13 +1,14 @@
1
- from typing import Any, Dict, Generic, Literal, Type, Union
1
+ from typing import Any, Dict, Generic, Iterable, List, Literal, Optional, Set, Type, Union
2
2
 
3
- from pydantic import field_serializer
3
+ from pydantic import ConfigDict, SerializerFunctionWrapHandler, field_serializer, model_serializer
4
+ from pydantic.main import IncEx
4
5
 
5
6
  from vellum.core.pydantic_utilities import UniversalBaseModel
6
-
7
7
  from vellum.workflows.errors import VellumError
8
8
  from vellum.workflows.expressions.accessor import AccessorExpression
9
9
  from vellum.workflows.nodes.bases import BaseNode
10
10
  from vellum.workflows.outputs.base import BaseOutput
11
+ from vellum.workflows.ports.port import Port
11
12
  from vellum.workflows.references.node import NodeReference
12
13
  from vellum.workflows.types.generics import OutputsType
13
14
 
@@ -21,6 +22,15 @@ class _BaseNodeExecutionBody(UniversalBaseModel):
21
22
  def serialize_node_definition(self, node_definition: Type, _info: Any) -> Dict[str, Any]:
22
23
  return serialize_type_encoder(node_definition)
23
24
 
25
+ # Couldn't get this to work with model_config.exclude_none or model_config.exclude_defaults
26
+ # so we're excluding null invoked_ports manually here for now
27
+ @model_serializer(mode="wrap", when_used="json")
28
+ def serialize_model(self, handler: SerializerFunctionWrapHandler) -> Any:
29
+ serialized = super().serialize_model(handler) # type: ignore[call-arg, arg-type]
30
+ if "invoked_ports" in serialized and serialized["invoked_ports"] is None:
31
+ del serialized["invoked_ports"]
32
+ return serialized
33
+
24
34
 
25
35
  class _BaseNodeEvent(BaseEvent):
26
36
  body: _BaseNodeExecutionBody
@@ -31,6 +41,7 @@ class _BaseNodeEvent(BaseEvent):
31
41
 
32
42
 
33
43
  NodeInputName = Union[NodeReference, AccessorExpression]
44
+ InvokedPorts = Optional[Set["Port"]]
34
45
 
35
46
 
36
47
  class NodeExecutionInitiatedBody(_BaseNodeExecutionBody):
@@ -52,11 +63,18 @@ class NodeExecutionInitiatedEvent(_BaseNodeEvent):
52
63
 
53
64
  class NodeExecutionStreamingBody(_BaseNodeExecutionBody):
54
65
  output: BaseOutput
66
+ invoked_ports: InvokedPorts = None
55
67
 
56
68
  @field_serializer("output")
57
69
  def serialize_output(self, output: BaseOutput, _info: Any) -> Dict[str, Any]:
58
70
  return default_serializer(output)
59
71
 
72
+ @field_serializer("invoked_ports")
73
+ def serialize_invoked_ports(self, invoked_ports: InvokedPorts, _info: Any) -> Optional[List[Dict[str, Any]]]:
74
+ if not invoked_ports:
75
+ return None
76
+ return [default_serializer(port) for port in invoked_ports]
77
+
60
78
 
61
79
  class NodeExecutionStreamingEvent(_BaseNodeEvent):
62
80
  name: Literal["node.execution.streaming"] = "node.execution.streaming"
@@ -66,14 +84,25 @@ class NodeExecutionStreamingEvent(_BaseNodeEvent):
66
84
  def output(self) -> BaseOutput:
67
85
  return self.body.output
68
86
 
87
+ @property
88
+ def invoked_ports(self) -> InvokedPorts:
89
+ return self.body.invoked_ports
90
+
69
91
 
70
92
  class NodeExecutionFulfilledBody(_BaseNodeExecutionBody, Generic[OutputsType]):
71
93
  outputs: OutputsType
94
+ invoked_ports: InvokedPorts = None
72
95
 
73
96
  @field_serializer("outputs")
74
97
  def serialize_outputs(self, outputs: OutputsType, _info: Any) -> Dict[str, Any]:
75
98
  return default_serializer(outputs)
76
99
 
100
+ @field_serializer("invoked_ports")
101
+ def serialize_invoked_ports(self, invoked_ports: InvokedPorts, _info: Any) -> Optional[List[Dict[str, Any]]]:
102
+ if invoked_ports is None:
103
+ return None
104
+ return [default_serializer(port) for port in invoked_ports]
105
+
77
106
 
78
107
  class NodeExecutionFulfilledEvent(_BaseNodeEvent, Generic[OutputsType]):
79
108
  name: Literal["node.execution.fulfilled"] = "node.execution.fulfilled"
@@ -83,6 +112,10 @@ class NodeExecutionFulfilledEvent(_BaseNodeEvent, Generic[OutputsType]):
83
112
  def outputs(self) -> OutputsType:
84
113
  return self.body.outputs
85
114
 
115
+ @property
116
+ def invoked_ports(self) -> InvokedPorts:
117
+ return self.body.invoked_ports
118
+
86
119
 
87
120
  class NodeExecutionRejectedBody(_BaseNodeExecutionBody):
88
121
  error: VellumError
@@ -6,7 +6,14 @@ from uuid import UUID
6
6
  from deepdiff import DeepDiff
7
7
 
8
8
  from vellum.workflows.errors.types import VellumError, VellumErrorCode
9
- from vellum.workflows.events.node import NodeExecutionInitiatedBody, NodeExecutionInitiatedEvent
9
+ from vellum.workflows.events.node import (
10
+ NodeExecutionFulfilledBody,
11
+ NodeExecutionFulfilledEvent,
12
+ NodeExecutionInitiatedBody,
13
+ NodeExecutionInitiatedEvent,
14
+ NodeExecutionStreamingBody,
15
+ NodeExecutionStreamingEvent,
16
+ )
10
17
  from vellum.workflows.events.types import NodeParentContext, WorkflowParentContext
11
18
  from vellum.workflows.events.workflow import (
12
19
  WorkflowExecutionFulfilledBody,
@@ -93,10 +100,9 @@ module_root = name_parts[: name_parts.index("events")]
93
100
  node_definition=MockNode,
94
101
  span_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
95
102
  parent=WorkflowParentContext(
96
- workflow_definition=MockWorkflow,
97
- span_id=UUID("123e4567-e89b-12d3-a456-426614174000")
98
- )
99
- )
103
+ workflow_definition=MockWorkflow, span_id=UUID("123e4567-e89b-12d3-a456-426614174000")
104
+ ),
105
+ ),
100
106
  ),
101
107
  {
102
108
  "id": "123e4567-e89b-12d3-a456-426614174000",
@@ -126,10 +132,10 @@ module_root = name_parts[: name_parts.index("events")]
126
132
  },
127
133
  "type": "WORKFLOW",
128
134
  "parent": None,
129
- "span_id": "123e4567-e89b-12d3-a456-426614174000"
135
+ "span_id": "123e4567-e89b-12d3-a456-426614174000",
130
136
  },
131
137
  "type": "WORKFLOW_NODE",
132
- "span_id": "123e4567-e89b-12d3-a456-426614174000"
138
+ "span_id": "123e4567-e89b-12d3-a456-426614174000",
133
139
  },
134
140
  },
135
141
  ),
@@ -164,7 +170,7 @@ module_root = name_parts[: name_parts.index("events")]
164
170
  "value": "foo",
165
171
  },
166
172
  },
167
- "parent": None
173
+ "parent": None,
168
174
  },
169
175
  ),
170
176
  (
@@ -233,6 +239,78 @@ module_root = name_parts[: name_parts.index("events")]
233
239
  "parent": None,
234
240
  },
235
241
  ),
242
+ (
243
+ NodeExecutionStreamingEvent(
244
+ id=UUID("123e4567-e89b-12d3-a456-426614174000"),
245
+ timestamp=datetime(2024, 1, 1, 12, 0, 0),
246
+ trace_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
247
+ span_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
248
+ body=NodeExecutionStreamingBody(
249
+ node_definition=MockNode,
250
+ output=BaseOutput(
251
+ name="example",
252
+ value="foo",
253
+ ),
254
+ ),
255
+ ),
256
+ {
257
+ "id": "123e4567-e89b-12d3-a456-426614174000",
258
+ "api_version": "2024-10-25",
259
+ "timestamp": "2024-01-01T12:00:00",
260
+ "trace_id": "123e4567-e89b-12d3-a456-426614174000",
261
+ "span_id": "123e4567-e89b-12d3-a456-426614174000",
262
+ "name": "node.execution.streaming",
263
+ "body": {
264
+ "node_definition": {
265
+ "name": "MockNode",
266
+ "module": module_root + ["events", "tests", "test_event"],
267
+ },
268
+ "output": {
269
+ "name": "example",
270
+ "value": "foo",
271
+ },
272
+ },
273
+ "parent": None,
274
+ },
275
+ ),
276
+ (
277
+ NodeExecutionFulfilledEvent(
278
+ id=UUID("123e4567-e89b-12d3-a456-426614174000"),
279
+ timestamp=datetime(2024, 1, 1, 12, 0, 0),
280
+ trace_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
281
+ span_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
282
+ body=NodeExecutionFulfilledBody(
283
+ node_definition=MockNode,
284
+ outputs=MockNode.Outputs(
285
+ example="foo",
286
+ ),
287
+ invoked_ports={MockNode.Ports.default},
288
+ ),
289
+ ),
290
+ {
291
+ "id": "123e4567-e89b-12d3-a456-426614174000",
292
+ "api_version": "2024-10-25",
293
+ "timestamp": "2024-01-01T12:00:00",
294
+ "trace_id": "123e4567-e89b-12d3-a456-426614174000",
295
+ "span_id": "123e4567-e89b-12d3-a456-426614174000",
296
+ "name": "node.execution.fulfilled",
297
+ "body": {
298
+ "node_definition": {
299
+ "name": "MockNode",
300
+ "module": module_root + ["events", "tests", "test_event"],
301
+ },
302
+ "outputs": {
303
+ "example": "foo",
304
+ },
305
+ "invoked_ports": [
306
+ {
307
+ "name": "default",
308
+ }
309
+ ],
310
+ },
311
+ "parent": None,
312
+ },
313
+ ),
236
314
  ],
237
315
  ids=[
238
316
  "workflow.execution.initiated",
@@ -240,7 +318,9 @@ module_root = name_parts[: name_parts.index("events")]
240
318
  "workflow.execution.streaming",
241
319
  "workflow.execution.fulfilled",
242
320
  "workflow.execution.rejected",
321
+ "node.execution.streaming",
322
+ "node.execution.fulfilled",
243
323
  ],
244
324
  )
245
325
  def test_event_serialization(event, expected_json):
246
- assert not DeepDiff(json.loads(event.model_dump_json()), expected_json)
326
+ assert not DeepDiff(event.model_dump(mode="json"), expected_json)
@@ -1,4 +1,4 @@
1
- from typing import Iterable
1
+ from typing import Set
2
2
 
3
3
  from vellum.workflows.nodes.bases import BaseNode
4
4
  from vellum.workflows.outputs.base import BaseOutputs
@@ -15,7 +15,7 @@ class ConditionalNode(BaseNode):
15
15
  """
16
16
 
17
17
  class Ports(NodePorts):
18
- def __call__(self, outputs: BaseOutputs, state: BaseState) -> Iterable[Port]:
18
+ def __call__(self, outputs: BaseOutputs, state: BaseState) -> Set[Port]:
19
19
  all_ports = [port for port in self.__class__]
20
20
  enforce_single_invoked_port = validate_ports(all_ports)
21
21
 
@@ -33,7 +33,7 @@ class _NodePortsMeta(type):
33
33
 
34
34
 
35
35
  class NodePorts(metaclass=_NodePortsMeta):
36
- def __call__(self, outputs: BaseOutputs, state: BaseState) -> Iterable[Port]:
36
+ def __call__(self, outputs: BaseOutputs, state: BaseState) -> Set[Port]:
37
37
  """
38
38
  Invokes the appropriate ports based on the fulfilled outputs and state.
39
39
  """
@@ -67,7 +67,7 @@ class NodePorts(metaclass=_NodePortsMeta):
67
67
 
68
68
  return invoked_ports
69
69
 
70
- def __lt__(self, output: BaseOutput) -> Iterable[Port]:
70
+ def __lt__(self, output: BaseOutput) -> Set[Port]:
71
71
  """
72
72
  Invokes the appropriate ports based on the streamed output
73
73
  """
@@ -1,5 +1,8 @@
1
1
  from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Type
2
2
 
3
+ from pydantic import GetCoreSchemaHandler
4
+ from pydantic_core import core_schema
5
+
3
6
  from vellum.workflows.descriptors.base import BaseDescriptor
4
7
  from vellum.workflows.edges.edge import Edge
5
8
  from vellum.workflows.graph import Graph, GraphTarget
@@ -73,3 +76,14 @@ class Port:
73
76
 
74
77
  value = self._condition.resolve(state)
75
78
  return bool(value)
79
+
80
+ def serialize(self) -> dict:
81
+ return {
82
+ "name": self.name,
83
+ }
84
+
85
+ @classmethod
86
+ def __get_pydantic_core_schema__(
87
+ cls, source_type: Type[Any], handler: GetCoreSchemaHandler
88
+ ) -> core_schema.CoreSchema:
89
+ return core_schema.is_instance_schema(cls)
@@ -4,6 +4,7 @@ from .lazy import LazyReference
4
4
  from .node import NodeReference
5
5
  from .output import OutputReference
6
6
  from .state_value import StateValueReference
7
+ from .vellum_secret import VellumSecretReference
7
8
  from .workflow_input import WorkflowInputReference
8
9
 
9
10
  __all__ = [
@@ -13,5 +14,6 @@ __all__ = [
13
14
  "NodeReference",
14
15
  "OutputReference",
15
16
  "StateValueReference",
17
+ "VellumSecretReference",
16
18
  "WorkflowInputReference",
17
19
  ]
@@ -188,9 +188,9 @@ class WorkflowRunner(Generic[StateType]):
188
188
  body=NodeExecutionStreamingBody(
189
189
  node_definition=node.__class__,
190
190
  output=BaseOutput(name=output.name),
191
+ invoked_ports=invoked_ports,
191
192
  ),
192
193
  ),
193
- invoked_ports=invoked_ports,
194
194
  )
195
195
  )
196
196
 
@@ -212,9 +212,9 @@ class WorkflowRunner(Generic[StateType]):
212
212
  body=NodeExecutionStreamingBody(
213
213
  node_definition=node.__class__,
214
214
  output=output,
215
+ invoked_ports=invoked_ports,
215
216
  ),
216
217
  ),
217
- invoked_ports=invoked_ports,
218
218
  )
219
219
  )
220
220
  elif output.is_fulfilled:
@@ -231,9 +231,9 @@ class WorkflowRunner(Generic[StateType]):
231
231
  body=NodeExecutionStreamingBody(
232
232
  node_definition=node.__class__,
233
233
  output=output,
234
+ invoked_ports=invoked_ports,
234
235
  ),
235
236
  ),
236
- invoked_ports=invoked_ports,
237
237
  )
238
238
  )
239
239
 
@@ -257,9 +257,9 @@ class WorkflowRunner(Generic[StateType]):
257
257
  body=NodeExecutionFulfilledBody(
258
258
  node_definition=node.__class__,
259
259
  outputs=outputs,
260
+ invoked_ports=invoked_ports,
260
261
  ),
261
262
  ),
262
- invoked_ports=invoked_ports,
263
263
  )
264
264
  )
265
265
  except NodeException as e:
@@ -339,7 +339,6 @@ class WorkflowRunner(Generic[StateType]):
339
339
  def _handle_work_item_event(self, work_item_event: WorkItemEvent[StateType]) -> Optional[VellumError]:
340
340
  node = work_item_event.node
341
341
  event = work_item_event.event
342
- invoked_ports = work_item_event.invoked_ports
343
342
 
344
343
  if event.name == "node.execution.initiated":
345
344
  return None
@@ -368,13 +367,13 @@ class WorkflowRunner(Generic[StateType]):
368
367
  )
369
368
  )
370
369
 
371
- self._handle_invoked_ports(node.state, invoked_ports)
370
+ self._handle_invoked_ports(node.state, event.invoked_ports)
372
371
 
373
372
  return None
374
373
 
375
374
  if event.name == "node.execution.fulfilled":
376
375
  self._active_nodes_by_execution_id.pop(event.span_id)
377
- self._handle_invoked_ports(node.state, invoked_ports)
376
+ self._handle_invoked_ports(node.state, event.invoked_ports)
378
377
 
379
378
  return None
380
379
 
@@ -1,18 +1,16 @@
1
1
  """Only intenral types and enums for WorkflowRunner should be defined in this module."""
2
2
 
3
3
  from dataclasses import dataclass
4
- from typing import TYPE_CHECKING, Generic, Iterable, Optional
4
+ from typing import TYPE_CHECKING, Generic
5
5
 
6
6
  from vellum.workflows.types.generics import StateType
7
7
 
8
8
  if TYPE_CHECKING:
9
9
  from vellum.workflows.events import NodeEvent
10
10
  from vellum.workflows.nodes.bases import BaseNode
11
- from vellum.workflows.ports import Port
12
11
 
13
12
 
14
13
  @dataclass(frozen=True)
15
14
  class WorkItemEvent(Generic[StateType]):
16
15
  node: "BaseNode[StateType]"
17
16
  event: "NodeEvent"
18
- invoked_ports: Optional[Iterable["Port"]] = None
@@ -9,6 +9,7 @@ from pydantic import BaseModel
9
9
 
10
10
  from vellum.workflows.inputs.base import BaseInputs
11
11
  from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
12
+ from vellum.workflows.ports.port import Port
12
13
  from vellum.workflows.state.base import BaseState, NodeExecutionCache
13
14
 
14
15
 
@@ -22,7 +23,7 @@ class DefaultStateEncoder(JSONEncoder):
22
23
  if isinstance(obj, (BaseInputs, BaseOutputs)):
23
24
  return {descriptor.name: value for descriptor, value in obj}
24
25
 
25
- if isinstance(obj, BaseOutput):
26
+ if isinstance(obj, (BaseOutput, Port)):
26
27
  return obj.serialize()
27
28
 
28
29
  if isinstance(obj, NodeExecutionCache):
@@ -1,5 +1,5 @@
1
1
  import pytest
2
- from typing import ClassVar, Generic, List, TypeVar, Union
2
+ from typing import Any, ClassVar, Generic, List, TypeVar, Union
3
3
 
4
4
  from vellum.workflows.nodes.bases.base import BaseNode
5
5
  from vellum.workflows.nodes.core.try_node.node import TryNode
@@ -20,6 +20,7 @@ class ExampleClass:
20
20
  )
21
21
  zeta: ClassVar[str]
22
22
  eta: List[str]
23
+ kappa: Any
23
24
 
24
25
 
25
26
  T = TypeVar("T")
@@ -53,6 +54,7 @@ class ExampleNode(BaseNode):
53
54
  (ExampleInheritedClass, "alpha", (str,)),
54
55
  (ExampleInheritedClass, "beta", (int,)),
55
56
  (ExampleNode.Outputs, "iota", (str,)),
57
+ (ExampleClass, "kappa", (Any,)),
56
58
  ],
57
59
  ids=[
58
60
  "str",
@@ -67,6 +69,7 @@ class ExampleNode(BaseNode):
67
69
  "inherited_parent_annotation",
68
70
  "inherited_parent_class_var",
69
71
  "try_node_output",
72
+ "any",
70
73
  ],
71
74
  )
72
75
  def test_infer_types(cls, attr_name, expected_type):
@@ -76,9 +79,9 @@ def test_infer_types(cls, attr_name, expected_type):
76
79
  @pytest.mark.parametrize(
77
80
  "cls, expected_attr_names",
78
81
  [
79
- (ExampleClass, {"alpha", "beta", "gamma", "epsilon", "zeta", "eta"}),
82
+ (ExampleClass, {"alpha", "beta", "gamma", "epsilon", "zeta", "eta", "kappa"}),
80
83
  (ExampleGenericClass, {"delta"}),
81
- (ExampleInheritedClass, {"alpha", "beta", "gamma", "epsilon", "zeta", "eta", "theta"}),
84
+ (ExampleInheritedClass, {"alpha", "beta", "gamma", "epsilon", "zeta", "eta", "theta", "kappa"}),
82
85
  ],
83
86
  )
84
87
  def test_class_attr_names(cls, expected_attr_names):
@@ -13,6 +13,7 @@ from typing import (
13
13
  Type,
14
14
  TypeVar,
15
15
  Union,
16
+ cast,
16
17
  get_args,
17
18
  get_origin,
18
19
  get_type_hints,
@@ -81,6 +82,8 @@ def infer_types(object_: Type, attr_name: str, localns: Optional[Dict[str, Any]]
81
82
  if type_hint in type_var_mapping:
82
83
  return (type_var_mapping[type_hint],)
83
84
  return type_hint.__constraints__
85
+ if type_hint is Any:
86
+ return cast(Tuple[Type[Any], ...], (Any,))
84
87
 
85
88
  for base in reversed(class_.__mro__):
86
89
  class_attributes = vars(base)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: vellum-ai
3
- Version: 0.10.6
3
+ Version: 0.10.7
4
4
  Summary:
5
5
  License: MIT
6
6
  Requires-Python: >=3.9,<4.0