vellum-ai 0.14.41__py3-none-any.whl → 0.14.42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. vellum/__init__.py +2 -4
  2. vellum/client/core/client_wrapper.py +1 -1
  3. vellum/client/reference.md +3 -3
  4. vellum/client/resources/documents/client.py +0 -6
  5. vellum/client/types/__init__.py +2 -4
  6. vellum/client/types/execute_api_response.py +3 -4
  7. vellum/client/types/execute_api_response_json.py +7 -0
  8. vellum/client/types/prompt_settings.py +1 -0
  9. vellum/client/types/workflow_event_execution_read.py +0 -4
  10. vellum/client/types/workflow_execution_initiated_body.py +0 -9
  11. vellum/client/types/workflow_execution_initiated_event.py +0 -4
  12. vellum/client/types/workflow_execution_span.py +0 -4
  13. vellum/types/{node_event_display_context.py → execute_api_response_json.py} +1 -1
  14. vellum/workflows/inputs/base.py +26 -3
  15. vellum/workflows/inputs/tests/test_inputs.py +15 -0
  16. vellum/workflows/nodes/bases/base_adornment_node.py +9 -0
  17. vellum/workflows/nodes/core/map_node/node.py +3 -2
  18. vellum/workflows/nodes/core/map_node/tests/test_node.py +56 -0
  19. vellum/workflows/nodes/core/retry_node/node.py +2 -1
  20. vellum/workflows/nodes/utils.py +14 -1
  21. vellum/workflows/references/workflow_input.py +5 -1
  22. vellum/workflows/runner/runner.py +2 -0
  23. vellum/workflows/workflows/base.py +5 -0
  24. {vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/METADATA +1 -1
  25. {vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/RECORD +55 -57
  26. vellum_ee/workflows/display/nodes/base_node_display.py +32 -23
  27. vellum_ee/workflows/display/nodes/vellum/api_node.py +1 -0
  28. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +1 -0
  29. vellum_ee/workflows/display/nodes/vellum/conditional_node.py +1 -0
  30. vellum_ee/workflows/display/nodes/vellum/final_output_node.py +6 -6
  31. vellum_ee/workflows/display/nodes/vellum/guardrail_node.py +1 -0
  32. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +1 -0
  33. vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +1 -0
  34. vellum_ee/workflows/display/nodes/vellum/map_node.py +15 -12
  35. vellum_ee/workflows/display/nodes/vellum/merge_node.py +1 -0
  36. vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py +1 -0
  37. vellum_ee/workflows/display/nodes/vellum/search_node.py +1 -0
  38. vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py +1 -0
  39. vellum_ee/workflows/display/nodes/vellum/templating_node.py +1 -0
  40. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_api_node_serialization.py +1 -0
  41. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_code_execution_node_serialization.py +3 -0
  42. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +138 -0
  43. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_guardrail_node_serialization.py +1 -0
  44. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_subworkflow_serialization.py +1 -0
  45. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py +3 -2
  46. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_merge_node_serialization.py +1 -0
  47. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_prompt_deployment_serialization.py +1 -0
  48. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_search_node_serialization.py +1 -0
  49. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py +1 -0
  50. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py +1 -0
  51. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py +2 -2
  52. vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py +2 -2
  53. vellum/client/types/node_event_display_context.py +0 -30
  54. vellum/client/types/workflow_event_display_context.py +0 -28
  55. vellum/types/workflow_event_display_context.py +0 -3
  56. {vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/LICENSE +0 -0
  57. {vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/WHEEL +0 -0
  58. {vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/entry_points.txt +0 -0
vellum/__init__.py CHANGED
@@ -105,6 +105,7 @@ from .types import (
  ExecuteApiRequestBody,
  ExecuteApiRequestHeadersValue,
  ExecuteApiResponse,
+ ExecuteApiResponseJson,
  ExecutePromptEvent,
  ExecutePromptResponse,
  ExecuteWorkflowResponse,
@@ -229,7 +230,6 @@ from .types import (
  NamedTestCaseVariableValue,
  NamedTestCaseVariableValueRequest,
  NewMemberJoinBehaviorEnum,
- NodeEventDisplayContext,
  NodeExecutionFulfilledBody,
  NodeExecutionFulfilledEvent,
  NodeExecutionInitiatedBody,
@@ -518,7 +518,6 @@ from .types import (
  WorkflowDeploymentReleaseWorkflowDeployment,
  WorkflowDeploymentReleaseWorkflowVersion,
  WorkflowError,
- WorkflowEventDisplayContext,
  WorkflowEventError,
  WorkflowEventExecutionRead,
  WorkflowExecutionActual,
@@ -728,6 +727,7 @@ __all__ = [
  "ExecuteApiRequestBody",
  "ExecuteApiRequestHeadersValue",
  "ExecuteApiResponse",
+ "ExecuteApiResponseJson",
  "ExecutePromptEvent",
  "ExecutePromptResponse",
  "ExecuteWorkflowResponse",
@@ -858,7 +858,6 @@ __all__ = [
  "NamedTestCaseVariableValue",
  "NamedTestCaseVariableValueRequest",
  "NewMemberJoinBehaviorEnum",
- "NodeEventDisplayContext",
  "NodeExecutionFulfilledBody",
  "NodeExecutionFulfilledEvent",
  "NodeExecutionInitiatedBody",
@@ -1151,7 +1150,6 @@ __all__ = [
  "WorkflowDeploymentReleaseWorkflowVersion",
  "WorkflowDeploymentsListRequestStatus",
  "WorkflowError",
- "WorkflowEventDisplayContext",
  "WorkflowEventError",
  "WorkflowEventExecutionRead",
  "WorkflowExecutionActual",
vellum/client/core/client_wrapper.py CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
  headers: typing.Dict[str, str] = {
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "vellum-ai",
- "X-Fern-SDK-Version": "0.14.41",
+ "X-Fern-SDK-Version": "0.14.42",
  }
  headers["X_API_KEY"] = self.api_key
  return headers
vellum/client/reference.md CHANGED
@@ -3408,7 +3408,7 @@ client.documents.retrieve(
  <dl>
  <dd>

- **id:** `str` — A UUID string identifying this document.
+ **id:** `str`

  </dd>
  </dl>
@@ -3478,7 +3478,7 @@ client.documents.destroy(
  <dl>
  <dd>

- **id:** `str` — A UUID string identifying this document.
+ **id:** `str`

  </dd>
  </dl>
@@ -3548,7 +3548,7 @@ client.documents.partial_update(
  <dl>
  <dd>

- **id:** `str` — A UUID string identifying this document.
+ **id:** `str`

  </dd>
  </dl>
vellum/client/resources/documents/client.py CHANGED
@@ -106,7 +106,6 @@ class DocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -154,7 +153,6 @@ class DocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -203,7 +201,6 @@ class DocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  label : typing.Optional[str]
  A human-readable label for the document. Defaults to the originally uploaded file's file name.
@@ -471,7 +468,6 @@ class AsyncDocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -527,7 +523,6 @@ class AsyncDocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -584,7 +579,6 @@ class AsyncDocumentsClient:
  Parameters
  ----------
  id : str
- A UUID string identifying this document.

  label : typing.Optional[str]
  A human-readable label for the document. Defaults to the originally uploaded file's file name.
vellum/client/types/__init__.py CHANGED
@@ -109,6 +109,7 @@ from .execute_api_request_bearer_token import ExecuteApiRequestBearerToken
  from .execute_api_request_body import ExecuteApiRequestBody
  from .execute_api_request_headers_value import ExecuteApiRequestHeadersValue
  from .execute_api_response import ExecuteApiResponse
+ from .execute_api_response_json import ExecuteApiResponseJson
  from .execute_prompt_event import ExecutePromptEvent
  from .execute_prompt_response import ExecutePromptResponse
  from .execute_workflow_response import ExecuteWorkflowResponse
@@ -237,7 +238,6 @@ from .named_test_case_string_variable_value_request import NamedTestCaseStringVa
  from .named_test_case_variable_value import NamedTestCaseVariableValue
  from .named_test_case_variable_value_request import NamedTestCaseVariableValueRequest
  from .new_member_join_behavior_enum import NewMemberJoinBehaviorEnum
- from .node_event_display_context import NodeEventDisplayContext
  from .node_execution_fulfilled_body import NodeExecutionFulfilledBody
  from .node_execution_fulfilled_event import NodeExecutionFulfilledEvent
  from .node_execution_initiated_body import NodeExecutionInitiatedBody
@@ -542,7 +542,6 @@ from .workflow_deployment_release import WorkflowDeploymentRelease
  from .workflow_deployment_release_workflow_deployment import WorkflowDeploymentReleaseWorkflowDeployment
  from .workflow_deployment_release_workflow_version import WorkflowDeploymentReleaseWorkflowVersion
  from .workflow_error import WorkflowError
- from .workflow_event_display_context import WorkflowEventDisplayContext
  from .workflow_event_error import WorkflowEventError
  from .workflow_event_execution_read import WorkflowEventExecutionRead
  from .workflow_execution_actual import WorkflowExecutionActual
@@ -715,6 +714,7 @@ __all__ = [
  "ExecuteApiRequestBody",
  "ExecuteApiRequestHeadersValue",
  "ExecuteApiResponse",
+ "ExecuteApiResponseJson",
  "ExecutePromptEvent",
  "ExecutePromptResponse",
  "ExecuteWorkflowResponse",
@@ -839,7 +839,6 @@ __all__ = [
  "NamedTestCaseVariableValue",
  "NamedTestCaseVariableValueRequest",
  "NewMemberJoinBehaviorEnum",
- "NodeEventDisplayContext",
  "NodeExecutionFulfilledBody",
  "NodeExecutionFulfilledEvent",
  "NodeExecutionInitiatedBody",
@@ -1128,7 +1127,6 @@ __all__ = [
  "WorkflowDeploymentReleaseWorkflowDeployment",
  "WorkflowDeploymentReleaseWorkflowVersion",
  "WorkflowError",
- "WorkflowEventDisplayContext",
  "WorkflowEventError",
  "WorkflowEventExecutionRead",
  "WorkflowExecutionActual",
vellum/client/types/execute_api_response.py CHANGED
@@ -2,8 +2,9 @@

  from ..core.pydantic_utilities import UniversalBaseModel
  import typing_extensions
- import typing
+ from .execute_api_response_json import ExecuteApiResponseJson
  from ..core.serialization import FieldMetadata
+ import typing
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
  import pydantic

@@ -11,9 +12,7 @@ import pydantic
  class ExecuteApiResponse(UniversalBaseModel):
  status_code: int
  text: str
- json_: typing_extensions.Annotated[
- typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="json")
- ] = None
+ json_: typing_extensions.Annotated[ExecuteApiResponseJson, FieldMetadata(alias="json")]
  headers: typing.Dict[str, str]

  if IS_PYDANTIC_V2:
vellum/client/types/execute_api_response_json.py CHANGED
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ ExecuteApiResponseJson = typing.Union[
+ typing.Dict[str, typing.Optional[typing.Any]], typing.List[typing.Optional[typing.Any]]
+ ]
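The json_ field on ExecuteApiResponse now uses the ExecuteApiResponseJson alias above, so a response body may be either a JSON object or a JSON array. Below is a minimal sketch of a consumer that handles both shapes; the summarize_json helper is illustrative and not part of the SDK.

    import typing

    from vellum.client.types.execute_api_response_json import ExecuteApiResponseJson


    def summarize_json(payload: typing.Optional[ExecuteApiResponseJson]) -> str:
        # Describe the shape of an execute-api JSON payload, which may now be a dict or a list.
        if payload is None:
            return "no JSON body"
        if isinstance(payload, dict):
            return f"object with {len(payload)} keys"
        return f"array with {len(payload)} items"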
vellum/client/types/prompt_settings.py CHANGED
@@ -8,6 +8,7 @@ import pydantic

  class PromptSettings(UniversalBaseModel):
  timeout: typing.Optional[float] = None
+ stream_enabled: typing.Optional[bool] = None

  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
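The new stream_enabled flag sits alongside the existing timeout option on PromptSettings, and both remain optional. A small usage sketch with illustrative values:

    from vellum.client.types.prompt_settings import PromptSettings

    # Both fields are Optional per the model above, so either may be omitted.
    settings = PromptSettings(timeout=30.0, stream_enabled=False)
    print(settings.timeout, settings.stream_enabled)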
vellum/client/types/workflow_event_execution_read.py CHANGED
@@ -10,8 +10,6 @@ from .workflow_deployment_parent_context import WorkflowDeploymentParentContext
  from .workflow_parent_context import WorkflowParentContext
  from .workflow_sandbox_parent_context import WorkflowSandboxParentContext
  from .array_vellum_value import ArrayVellumValue
- from .node_event_display_context import NodeEventDisplayContext
- from .workflow_event_display_context import WorkflowEventDisplayContext
  import typing
  import datetime as dt
  from .execution_vellum_value import ExecutionVellumValue
@@ -56,5 +54,3 @@ update_forward_refs(WorkflowDeploymentParentContext, WorkflowEventExecutionRead=
  update_forward_refs(WorkflowParentContext, WorkflowEventExecutionRead=WorkflowEventExecutionRead)
  update_forward_refs(WorkflowSandboxParentContext, WorkflowEventExecutionRead=WorkflowEventExecutionRead)
  update_forward_refs(ArrayVellumValue, WorkflowEventExecutionRead=WorkflowEventExecutionRead)
- update_forward_refs(NodeEventDisplayContext, WorkflowEventExecutionRead=WorkflowEventExecutionRead)
- update_forward_refs(WorkflowEventDisplayContext, WorkflowEventExecutionRead=WorkflowEventExecutionRead)
vellum/client/types/workflow_execution_initiated_body.py CHANGED
@@ -1,19 +1,14 @@
  # This file was auto-generated by Fern from our API Definition.

- from __future__ import annotations
  from ..core.pydantic_utilities import UniversalBaseModel
- from .node_event_display_context import NodeEventDisplayContext
- from .workflow_event_display_context import WorkflowEventDisplayContext
  from .vellum_code_resource_definition import VellumCodeResourceDefinition
  import typing
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
  import pydantic
- from ..core.pydantic_utilities import update_forward_refs


  class WorkflowExecutionInitiatedBody(UniversalBaseModel):
  workflow_definition: VellumCodeResourceDefinition
- display_context: typing.Optional[WorkflowEventDisplayContext] = None
  inputs: typing.Dict[str, typing.Optional[typing.Any]]

  if IS_PYDANTIC_V2:
@@ -24,7 +19,3 @@ class WorkflowExecutionInitiatedBody(UniversalBaseModel):
  frozen = True
  smart_union = True
  extra = pydantic.Extra.allow
-
-
- update_forward_refs(NodeEventDisplayContext, WorkflowExecutionInitiatedBody=WorkflowExecutionInitiatedBody)
- update_forward_refs(WorkflowEventDisplayContext, WorkflowExecutionInitiatedBody=WorkflowExecutionInitiatedBody)
vellum/client/types/workflow_execution_initiated_event.py CHANGED
@@ -9,8 +9,6 @@ from .span_link import SpanLink
  from .workflow_deployment_parent_context import WorkflowDeploymentParentContext
  from .workflow_parent_context import WorkflowParentContext
  from .workflow_sandbox_parent_context import WorkflowSandboxParentContext
- from .node_event_display_context import NodeEventDisplayContext
- from .workflow_event_display_context import WorkflowEventDisplayContext
  import typing
  from .parent_context import ParentContext
  from .workflow_execution_initiated_body import WorkflowExecutionInitiatedBody
@@ -49,5 +47,3 @@ update_forward_refs(SpanLink, WorkflowExecutionInitiatedEvent=WorkflowExecutionI
  update_forward_refs(WorkflowDeploymentParentContext, WorkflowExecutionInitiatedEvent=WorkflowExecutionInitiatedEvent)
  update_forward_refs(WorkflowParentContext, WorkflowExecutionInitiatedEvent=WorkflowExecutionInitiatedEvent)
  update_forward_refs(WorkflowSandboxParentContext, WorkflowExecutionInitiatedEvent=WorkflowExecutionInitiatedEvent)
- update_forward_refs(NodeEventDisplayContext, WorkflowExecutionInitiatedEvent=WorkflowExecutionInitiatedEvent)
- update_forward_refs(WorkflowEventDisplayContext, WorkflowExecutionInitiatedEvent=WorkflowExecutionInitiatedEvent)
vellum/client/types/workflow_execution_span.py CHANGED
@@ -3,12 +3,10 @@
  from __future__ import annotations
  from ..core.pydantic_utilities import UniversalBaseModel
  from .api_request_parent_context import ApiRequestParentContext
- from .node_event_display_context import NodeEventDisplayContext
  from .node_parent_context import NodeParentContext
  from .prompt_deployment_parent_context import PromptDeploymentParentContext
  from .span_link import SpanLink
  from .workflow_deployment_parent_context import WorkflowDeploymentParentContext
- from .workflow_event_display_context import WorkflowEventDisplayContext
  from .workflow_parent_context import WorkflowParentContext
  from .workflow_sandbox_parent_context import WorkflowSandboxParentContext
  import typing
@@ -40,11 +38,9 @@ class WorkflowExecutionSpan(UniversalBaseModel):


  update_forward_refs(ApiRequestParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
- update_forward_refs(NodeEventDisplayContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(NodeParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(PromptDeploymentParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(SpanLink, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(WorkflowDeploymentParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
- update_forward_refs(WorkflowEventDisplayContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(WorkflowParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
  update_forward_refs(WorkflowSandboxParentContext, WorkflowExecutionSpan=WorkflowExecutionSpan)
vellum/types/{node_event_display_context.py → execute_api_response_json.py} CHANGED
@@ -1,3 +1,3 @@
  # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.

- from vellum.client.types.node_event_display_context import *
+ from vellum.client.types.execute_api_response_json import *
vellum/workflows/inputs/base.py CHANGED
@@ -1,4 +1,4 @@
- from typing import Any, Iterator, Tuple, Type, Union, get_args, get_origin
+ from typing import Any, Dict, Iterator, Set, Tuple, Type, Union, get_args, get_origin
  from typing_extensions import dataclass_transform

  from pydantic import GetCoreSchemaHandler
@@ -14,11 +14,28 @@ from vellum.workflows.types.utils import get_class_attr_names, infer_types

  @dataclass_transform(kw_only_default=True)
  class _BaseInputsMeta(type):
+ def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
+ dct["__parent_class__"] = type(None)
+ return super().__new__(cls, name, bases, dct)
+
  def __getattribute__(cls, name: str) -> Any:
- if not name.startswith("_") and name in cls.__annotations__ and issubclass(cls, BaseInputs):
+ if name.startswith("_") or not issubclass(cls, BaseInputs):
+ return super().__getattribute__(name)
+
+ attr_names = get_class_attr_names(cls)
+ if name in attr_names:
+ # We first try to resolve the instance that this class attribute name is mapped to. If it's not found,
+ # we iterate through its inheritance hierarchy to find the first base class that has this attribute
+ # and use its mapping.
  instance = vars(cls).get(name, undefined)
- types = infer_types(cls, name)
+ if instance is undefined:
+ for base in cls.__mro__[1:]:
+ inherited_input_reference = getattr(base, name, undefined)
+ if isinstance(inherited_input_reference, (ExternalInputReference, WorkflowInputReference)):
+ instance = inherited_input_reference.instance
+ break

+ types = infer_types(cls, name)
  if getattr(cls, "__descriptor_class__", None) is ExternalInputReference:
  return ExternalInputReference(name=name, types=types, instance=instance, inputs_class=cls)
  else:
@@ -29,14 +46,20 @@ class _BaseInputsMeta(type):
  def __iter__(cls) -> Iterator[InputReference]:
  # We iterate through the inheritance hierarchy to find all the WorkflowInputReference attached to this
  # Inputs class. __mro__ is the method resolution order, which is the order in which base classes are resolved.
+ yielded_attr_names: Set[str] = set()
+
  for resolved_cls in cls.__mro__:
  attr_names = get_class_attr_names(resolved_cls)
  for attr_name in attr_names:
+ if attr_name in yielded_attr_names:
+ continue
+
  attr_value = getattr(resolved_cls, attr_name)
  if not isinstance(attr_value, (WorkflowInputReference, ExternalInputReference)):
  continue

  yield attr_value
+ yielded_attr_names.add(attr_name)


  class BaseInputs(metaclass=_BaseInputsMeta):
vellum/workflows/inputs/tests/test_inputs.py CHANGED
@@ -47,3 +47,18 @@ def test_base_inputs_with_default():

  # THEN it should use the default value
  assert inputs.string_with_default == "default_value"
+
+
+ def test_base_inputs__supports_inherited_inputs():
+ # GIVEN an inputs class
+ class TopInputs(BaseInputs):
+ first: str
+
+ # WHEN we inherit from the base inputs class
+ class BottomInputs(TopInputs):
+ second: int
+
+ # THEN both references should be available
+ assert BottomInputs.first.name == "first"
+ assert BottomInputs.second.name == "second"
+ assert len([ref for ref in BottomInputs]) == 2
vellum/workflows/nodes/bases/base_adornment_node.py CHANGED
@@ -1,5 +1,6 @@
  from typing import TYPE_CHECKING, Any, Dict, Generic, Optional, Tuple, Type

+ from vellum.workflows.inputs.base import BaseInputs
  from vellum.workflows.nodes.bases.base import BaseNode, BaseNodeMeta
  from vellum.workflows.outputs.base import BaseOutputs
  from vellum.workflows.references.output import OutputReference
@@ -13,6 +14,14 @@ class _BaseAdornmentNodeMeta(BaseNodeMeta):
  def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
  node_class = super().__new__(cls, name, bases, dct)

+ SubworkflowInputs = dct.get("SubworkflowInputs")
+ if (
+ isinstance(SubworkflowInputs, type)
+ and issubclass(SubworkflowInputs, BaseInputs)
+ and SubworkflowInputs.__parent_class__ is type(None)
+ ):
+ SubworkflowInputs.__parent_class__ = node_class
+
  subworkflow_attribute = dct.get("subworkflow")
  if not subworkflow_attribute:
  return node_class
vellum/workflows/nodes/core/map_node/node.py CHANGED
@@ -62,7 +62,7 @@ class MapNode(BaseAdornmentNode[StateType], Generic[StateType, MapNodeItemType])

  item: MapNodeItemType  # type: ignore[valid-type]
  index: int
- all_items: List[MapNodeItemType]  # type: ignore[valid-type]
+ items: List[MapNodeItemType]  # type: ignore[valid-type]

  def run(self) -> Iterator[BaseOutput]:
  mapped_items: Dict[str, List] = defaultdict(list)
@@ -176,8 +176,9 @@ class MapNode(BaseAdornmentNode[StateType], Generic[StateType, MapNodeItemType])
  parent_state=self.state,
  context=context,
  )
+ SubworkflowInputsClass = self.subworkflow.get_inputs_class()
  events = subworkflow.stream(
- inputs=self.SubworkflowInputs(index=index, item=item, all_items=self.items),
+ inputs=SubworkflowInputsClass(index=index, item=item, items=self.items),
  node_output_mocks=self._context._get_all_node_output_mocks(),
  event_filter=all_workflow_event_filter,
  )
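Note that the subworkflow input previously named all_items is now items, and the node resolves the inputs class through subworkflow.get_inputs_class() rather than self.SubworkflowInputs. Below is a sketch of a node inside a map subworkflow reading the renamed reference; the LabelItemNode class is illustrative only, while the test that follows shows the pattern actually exercised in this release.

    from vellum.workflows.nodes import BaseNode
    from vellum.workflows.nodes.core.map_node.node import MapNode


    class LabelItemNode(BaseNode):
        # item / index / items come from MapNode.SubworkflowInputs; `items` replaces `all_items`.
        item = MapNode.SubworkflowInputs.item
        index = MapNode.SubworkflowInputs.index
        items = MapNode.SubworkflowInputs.items

        class Outputs(BaseNode.Outputs):
            label: str

        def run(self) -> Outputs:
            return self.Outputs(label=f"{self.index + 1}/{len(self.items)}: {self.item}")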
vellum/workflows/nodes/core/map_node/tests/test_node.py CHANGED
@@ -116,3 +116,59 @@ def test_map_node__inner_try():
  # THEN the workflow should succeed
  assert outputs[-1].name == "final_output"
  assert len(outputs[-1].value) == 2
+
+
+ def test_map_node__nested_map_node():
+ # GIVEN the inner map node's inputs
+ class VegetableMapNodeInputs(MapNode.SubworkflowInputs):
+ item: str
+
+ # AND the outer map node's inputs
+ class FruitMapNodeInputs(MapNode.SubworkflowInputs):
+ item: str
+
+ # AND a simple node that concats both attributes
+ class SimpleConcatNode(BaseNode):
+ fruit = FruitMapNodeInputs.item
+ vegetable = VegetableMapNodeInputs.item
+
+ class Outputs(BaseNode.Outputs):
+ medley: str
+
+ def run(self) -> Outputs:
+ return self.Outputs(medley=f"{self.fruit} {self.vegetable}")
+
+ # AND a workflow using that node
+ class VegetableMapNodeWorkflow(BaseWorkflow[VegetableMapNodeInputs, BaseState]):
+ graph = SimpleConcatNode
+
+ class Outputs(BaseWorkflow.Outputs):
+ final_output = SimpleConcatNode.Outputs.medley
+
+ # AND an inner map node referencing that workflow
+ class VegetableMapNode(MapNode):
+ items = ["carrot", "potato"]
+ subworkflow = VegetableMapNodeWorkflow
+
+ # AND an outer subworkflow referencing the inner map node
+ class FruitMapNodeWorkflow(BaseWorkflow[FruitMapNodeInputs, BaseState]):
+ graph = VegetableMapNode
+
+ class Outputs(BaseWorkflow.Outputs):
+ final_output = VegetableMapNode.Outputs.final_output
+
+ # AND an outer map node referencing the outer subworkflow
+ class FruitMapNode(MapNode):
+ items = ["apple", "banana"]
+ subworkflow = FruitMapNodeWorkflow
+
+ # WHEN we run the workflow
+ stream = FruitMapNode().run()
+ outputs = list(stream)
+
+ # THEN the workflow should succeed
+ assert outputs[-1].name == "final_output"
+ assert outputs[-1].value == [
+ ["apple carrot", "apple potato"],
+ ["banana carrot", "banana potato"],
+ ]
vellum/workflows/nodes/core/retry_node/node.py CHANGED
@@ -47,8 +47,9 @@ class RetryNode(BaseAdornmentNode[StateType], Generic[StateType]):
  parent_state=self.state,
  context=WorkflowContext.create_from(self._context),
  )
+ inputs_class = subworkflow.get_inputs_class()
  subworkflow_stream = subworkflow.stream(
- inputs=self.SubworkflowInputs(attempt_number=attempt_number),
+ inputs=inputs_class(attempt_number=attempt_number),
  event_filter=all_workflow_event_filter,
  node_output_mocks=self._context._get_all_node_output_mocks(),
  )
vellum/workflows/nodes/utils.py CHANGED
@@ -9,9 +9,11 @@ from pydantic import BaseModel, create_model
  from vellum.client.types.function_call import FunctionCall
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
+ from vellum.workflows.inputs.base import BaseInputs
  from vellum.workflows.nodes import BaseNode
  from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
  from vellum.workflows.ports.port import Port
+ from vellum.workflows.state.base import BaseState
  from vellum.workflows.types.core import Json
  from vellum.workflows.types.generics import NodeType

@@ -54,7 +56,18 @@ def create_adornment(
  # https://app.shortcut.com/vellum/story/4116
  from vellum.workflows import BaseWorkflow

- class Subworkflow(BaseWorkflow):
+ SubworkflowInputs = getattr(adornable_cls, "SubworkflowInputs", None)
+ BaseSubworkflowInputs = (
+ SubworkflowInputs
+ if isinstance(SubworkflowInputs, type) and issubclass(SubworkflowInputs, BaseInputs)
+ else BaseInputs
+ )
+
+ # mypy is too conservative here - you can absolutely inherit from dynamic classes in python
+ class Inputs(BaseSubworkflowInputs):  # type: ignore[misc, valid-type]
+ pass
+
+ class Subworkflow(BaseWorkflow[Inputs, BaseState]):
  graph = inner_cls

  class Outputs(BaseWorkflow.Outputs):
vellum/workflows/references/workflow_input.py CHANGED
@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING, Generic, Optional, Tuple, Type, TypeVar, cast
  from vellum.workflows.descriptors.base import BaseDescriptor
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
+ from vellum.workflows.types.generics import import_workflow_class

  if TYPE_CHECKING:
  from vellum.workflows.inputs.base import BaseInputs
@@ -29,7 +30,10 @@ class WorkflowInputReference(BaseDescriptor[_InputType], Generic[_InputType]):
  return self._inputs_class

  def resolve(self, state: "BaseState") -> _InputType:
- if hasattr(state.meta.workflow_inputs, self._name):
+ if hasattr(state.meta.workflow_inputs, self._name) and (
+ state.meta.workflow_definition == self._inputs_class.__parent_class__
+ or not issubclass(self._inputs_class.__parent_class__, import_workflow_class())
+ ):
  return cast(_InputType, getattr(state.meta.workflow_inputs, self._name))

  if state.meta.parent:
vellum/workflows/runner/runner.py CHANGED
@@ -101,6 +101,7 @@ class WorkflowRunner(Generic[StateType]):
  if state:
  self._initial_state = deepcopy(state)
  self._initial_state.meta.span_id = uuid4()
+ self._initial_state.meta.workflow_definition = self.workflow.__class__
  else:
  self._initial_state = self.workflow.get_state_at_node(node)
  self._entrypoints = entrypoint_nodes
@@ -126,6 +127,7 @@
  self._initial_state = deepcopy(state)
  self._initial_state.meta.workflow_inputs = normalized_inputs
  self._initial_state.meta.span_id = uuid4()
+ self._initial_state.meta.workflow_definition = self.workflow.__class__
  else:
  self._initial_state = self.workflow.get_default_state(normalized_inputs)
  # We don't want to emit the initial state on the base case of Workflow Runs, since
vellum/workflows/workflows/base.py CHANGED
@@ -133,6 +133,11 @@ class _BaseWorkflowMeta(type):
  cls = super().__new__(mcs, name, bases, dct)
  workflow_class = cast(Type["BaseWorkflow"], cls)
  workflow_class.__id__ = uuid4_from_hash(workflow_class.__qualname__)
+
+ inputs_class = workflow_class.get_inputs_class()
+ if inputs_class is not BaseInputs and inputs_class.__parent_class__ is type(None):
+ inputs_class.__parent_class__ = workflow_class
+
  return workflow_class


{vellum_ai-0.14.41.dist-info → vellum_ai-0.14.42.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.14.41
+ Version: 0.14.42
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0