vellum-ai 0.13.23__py3-none-any.whl → 0.13.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.13.23",
+            "X-Fern-SDK-Version": "0.13.25",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
@@ -11,7 +11,9 @@ import pydantic
 class ExecuteApiResponse(UniversalBaseModel):
     status_code: int
     text: str
-    json_: typing_extensions.Annotated[typing.Dict[str, typing.Optional[typing.Any]], FieldMetadata(alias="json")]
+    json_: typing_extensions.Annotated[
+        typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="json")
+    ] = None
     headers: typing.Dict[str, str]

     if IS_PYDANTIC_V2:
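
Note: the `json` field on `ExecuteApiResponse` is now optional and defaults to `None`, so a response whose body is not valid JSON no longer fails validation. A minimal caller-side sketch, assuming only the model shown in the hunk above:

```python
from vellum.client.types.execute_api_response import ExecuteApiResponse

# Constructing the response without a JSON body is now valid,
# because `json_` is Optional and defaults to None.
response = ExecuteApiResponse(status_code=200, text="not json", headers={})

# Callers should branch on the optional field instead of assuming a dict.
payload = response.json_ if response.json_ is not None else {}
print(payload)  # {}
```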
@@ -170,11 +170,11 @@ class CodeExecutionNode(BaseNode[StateType], Generic[StateType, _OutputType], me
                         value=cast(Dict[str, Any], input_value),
                     )
                 )
-            elif isinstance(input_value, float):
+            elif isinstance(input_value, (float, int)):
                 compiled_inputs.append(
                     NumberInput(
                         name=input_name,
-                        value=input_value,
+                        value=float(input_value),
                     )
                 )
             elif isinstance(input_value, FunctionCall):
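
Note: integer code inputs now take the same path as floats and are normalized with `float()` before being sent as a `NumberInput`. A rough sketch of the branch in isolation, assuming only the types referenced in the hunk; the helper name is illustrative:

```python
from typing import Optional

from vellum.client.types.number_input import NumberInput


def compile_number_input(input_name: str, input_value: object) -> Optional[NumberInput]:
    # Mirrors the updated branch: ints and floats both become a NumberInput,
    # with the value coerced to float.
    if isinstance(input_value, (float, int)):
        return NumberInput(name=input_name, value=float(input_value))
    return None


print(compile_number_input("counter", 1))  # name='counter' value=1.0
```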
@@ -6,6 +6,7 @@ from vellum import CodeExecutorResponse, NumberVellumValue, StringInput
 from vellum.client.types.code_execution_package import CodeExecutionPackage
 from vellum.client.types.code_executor_secret_input import CodeExecutorSecretInput
 from vellum.client.types.function_call import FunctionCall
+from vellum.client.types.number_input import NumberInput
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.displayable.code_execution_node import CodeExecutionNode
@@ -13,7 +14,7 @@ from vellum.workflows.references.vellum_secret import VellumSecretReference
 from vellum.workflows.state.base import BaseState, StateMeta


-def test_run_workflow__happy_path(vellum_client):
+def test_run_node__happy_path(vellum_client):
     """Confirm that CodeExecutionNodes output the expected text and results when run."""

     # GIVEN a node that subclasses CodeExecutionNode
@@ -79,7 +80,7 @@ def main(word: str) -> int:
     )


-def test_run_workflow__code_attribute(vellum_client):
+def test_run_node__code_attribute(vellum_client):
     """Confirm that CodeExecutionNodes can use the `code` attribute to specify the code to execute."""

     # GIVEN a node that subclasses CodeExecutionNode
@@ -147,7 +148,7 @@ def main(word: str) -> int:
     )


-def test_run_workflow__code_and_filepath_defined(vellum_client):
+def test_run_node__code_and_filepath_defined(vellum_client):
     """Confirm that CodeExecutionNodes raise an error if both `code` and `filepath` are defined."""

     # GIVEN a node that subclasses CodeExecutionNode
@@ -198,7 +199,7 @@ def main(word: str) -> int:
     assert exc_info.value.message == "Cannot specify both `code` and `filepath` for a CodeExecutionNode"


-def test_run_workflow__code_and_filepath_not_defined(vellum_client):
+def test_run_node__code_and_filepath_not_defined(vellum_client):
     """Confirm that CodeExecutionNodes raise an error if neither `code` nor `filepath` are defined."""

     # GIVEN a node that subclasses CodeExecutionNode
@@ -241,7 +242,7 @@ def test_run_workflow__code_and_filepath_not_defined(vellum_client):
     assert exc_info.value.message == "Must specify either `code` or `filepath` for a CodeExecutionNode"


-def test_run_workflow__vellum_secret(vellum_client):
+def test_run_node__vellum_secret(vellum_client):
     """Confirm that CodeExecutionNodes can use Vellum Secrets"""

     # GIVEN a node that subclasses CodeExecutionNode that references a Vellum Secret
@@ -303,7 +304,53 @@ def main(word: str) -> int:
     )


-def test_run_workflow__run_inline(vellum_client):
+def test_run_node__int_input(vellum_client):
+    """Confirm that CodeExecutionNodes can use int's as inputs"""
+
+    # GIVEN a node that subclasses CodeExecutionNode that references an int
+    class State(BaseState):
+        pass
+
+    fixture = os.path.abspath(os.path.join(__file__, "../fixtures/main.py"))
+
+    class ExampleCodeExecutionNode(CodeExecutionNode[State, int]):
+        filepath = fixture
+        runtime = "PYTHON_3_11_6"
+        packages = [
+            CodeExecutionPackage(
+                name="openai",
+                version="1.0.0",
+            )
+        ]
+
+        code_inputs = {
+            "counter": 1,
+        }
+
+    # AND we know what the Code Execution Node will respond with
+    mock_code_execution = CodeExecutorResponse(
+        log="",
+        output=NumberVellumValue(value=0),
+    )
+    vellum_client.execute_code.return_value = mock_code_execution
+
+    # WHEN we run the node
+    node = ExampleCodeExecutionNode(state=State())
+    outputs = node.run()
+
+    # THEN the node should have produced the outputs we expect
+    assert outputs == {"result": 0, "log": ""}
+
+    # AND we should have invoked the Code with the correct inputs
+    assert vellum_client.execute_code.call_args_list[0].kwargs["input_values"] == [
+        NumberInput(
+            name="counter",
+            value=1.0,
+        )
+    ]
+
+
+def test_run_node__run_inline(vellum_client):
     """Confirm that CodeExecutionNodes run the code inline instead of through Vellum under certain conditions."""

     # GIVEN a node that subclasses CodeExecutionNode
@@ -330,7 +377,7 @@ def main(word: str) -> int:
     vellum_client.execute_code.assert_not_called()


-def test_run_workflow__run_inline__incorrect_output_type():
+def test_run_node__run_inline__incorrect_output_type():
     """Confirm that CodeExecutionNodes raise an error if the output type is incorrect during inline execution."""

     # GIVEN a node that subclasses CodeExecutionNode that returns a string but is defined to return an int
@@ -354,7 +401,7 @@ def main(word: str) -> int:
     assert exc_info.value.message == "Expected an output of type 'int', but received 'str'"


-def test_run_workflow__run_inline__valid_dict_to_pydantic():
+def test_run_node__run_inline__valid_dict_to_pydantic():
     """Confirm that CodeExecutionNodes can convert a dict to a Pydantic model during inline execution."""

     # GIVEN a node that subclasses CodeExecutionNode that returns a dict matching a Pydantic model
@@ -380,7 +427,7 @@ def main(word: str) -> int:
     assert outputs == {"result": FunctionCall(name="hello", arguments={}), "log": ""}


-def test_run_workflow__run_inline__invalid_dict_to_pydantic():
+def test_run_node__run_inline__invalid_dict_to_pydantic():
     """Confirm that CodeExecutionNodes raise an error if the Pydantic validation fails during inline execution."""

     # GIVEN a node that subclasses CodeExecutionNode that returns a dict not matching a Pydantic model
@@ -416,7 +463,7 @@ name
     )


-def test_run_workflow__run_inline__valid_dict_to_pydantic_any_type():
+def test_run_node__run_inline__valid_dict_to_pydantic_any_type():
     """Confirm that CodeExecutionNodes can convert a dict to a Pydantic model during inline execution."""

     # GIVEN a node that subclasses CodeExecutionNode that returns a dict matching Any
@@ -1,7 +1,8 @@
 import ast
 import inspect
-from typing import TYPE_CHECKING, Callable, Generic, TypeVar, get_args
+from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union, get_args

+from vellum.workflows.constants import UNDEF
 from vellum.workflows.descriptors.base import BaseDescriptor

 if TYPE_CHECKING:
@@ -13,7 +14,7 @@ _T = TypeVar("_T")
 class LazyReference(BaseDescriptor[_T], Generic[_T]):
     def __init__(
         self,
-        get: Callable[[], BaseDescriptor[_T]],
+        get: Union[Callable[[], BaseDescriptor[_T]], str],
     ) -> None:
         self._get = get
         # TODO: figure out this some times returns empty
@@ -25,6 +26,19 @@ class LazyReference(BaseDescriptor[_T], Generic[_T]):
     def resolve(self, state: "BaseState") -> _T:
         from vellum.workflows.descriptors.utils import resolve_value

+        if isinstance(self._get, str):
+            # The full solution will involve creating a nodes registry on `WorkflowContext`. I want to update
+            # how WorkflowContext works so that we could just access it directly instead of it needing to be
+            # passed in, similar to get_workflow_context(). Because we don't want this to slow down p1 issues
+            # that we are debugging with existing workflows, using the following workaround for now.
+            for output_reference, value in state.meta.node_outputs.items():
+                if str(output_reference) == self._get:
+                    return value
+
+            # Fix typing surrounding the return value of node outputs/output descriptors
+            # https://app.shortcut.com/vellum/story/4783
+            return UNDEF  # type: ignore[return-value]
+
         return resolve_value(self._get(), state)

     def _get_name(self) -> str:
@@ -33,6 +47,9 @@ class LazyReference(BaseDescriptor[_T], Generic[_T]):
         setting that as the descriptor's name. Names are only used for debugging, so
         we could flesh out edge cases over time.
         """
+        if isinstance(self._get, str):
+            return self._get
+
         source = inspect.getsource(self._get).strip()
         try:
             parsed = ast.parse(source)
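
Note: `LazyReference` now also accepts a dotted string in place of a callable, so a node can reference another node's output before that class is defined; at run time the string is matched against `state.meta.node_outputs`, falling back to `UNDEF`. A minimal usage sketch, assuming the import paths used elsewhere in this diff; the node names are illustrative:

```python
from vellum.workflows.nodes.bases.base import BaseNode
from vellum.workflows.references.lazy import LazyReference


class FirstNode(BaseNode):
    # The string form defers resolution: "SecondNode.Outputs.result" is looked up
    # by name when the workflow runs, even though SecondNode is defined below.
    attr = LazyReference[str]("SecondNode.Outputs.result")


class SecondNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        result: str
```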
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 0.13.23
+Version: 0.13.25
 Summary: 
 License: MIT
 Requires-Python: >=3.9,<4.0
@@ -21,7 +21,7 @@ vellum_ee/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 vellum_ee/workflows/display/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/base.py,sha256=3ZFUYRNKL24fBqXhKpa_Dq2W1a-a86J20dmJYA3H2eY,1755
 vellum_ee/workflows/display/nodes/__init__.py,sha256=5XOcZJXYUgaLS55QgRJzyQ_W1tpeprjnYAeYVezqoGw,160
-vellum_ee/workflows/display/nodes/base_node_display.py,sha256=ErIK_1DYax0LlFX4AvV1oua8I7JlpXNncjGNadVe-bo,15801
+vellum_ee/workflows/display/nodes/base_node_display.py,sha256=7RuZkNJxbyOD2w3qPr-5S11mc-iN-NydyUT4FppS62o,16849
 vellum_ee/workflows/display/nodes/base_node_vellum_display.py,sha256=pLO0dORfRu--Ne9NgoyFT_CNjfpr5fGCsgbsMkUF5GM,2845
 vellum_ee/workflows/display/nodes/get_node_display_class.py,sha256=0S6ksPp53WXWh1RQVH71nj2bkCWBj4ZaFYhTxW3N2F4,1230
 vellum_ee/workflows/display/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -35,7 +35,7 @@ vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=ybLIa4uclqVI
 vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=I1Jkp2htRINJATtv1e-zs9BrReFX842djpiVgBPHDYg,2186
 vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=p-PvlnxpBQ7IKskZi2A19jKAtKnSxJ8LPbGMA83VkFk,2805
 vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=aYZSJTxknU4LMiQdWk9LcK6CkhdozeDEMiRxfAyUNEc,2202
-vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=h8K183cZsU2b86WYQlqFtwMkwIHydnNr4ZaaQcWnFWo,7316
+vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=7wVzx-NxWI9TnFDSgiBsaVj0l7G2v37NP4jxlTEsX_w,8375
 vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=9858pFzuhkAAmLcw4gkt5mPvsqWk5vqcX_vwHOg2xZw,5610
 vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=VlO3UwkspCOdDQ-h3v8k16-7JZwWNSLpOLT4p-eirIs,3740
 vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=HkNMgdQELiON42jdO-xDLmqrEKdGx1RVqrz2DXNTLS8,3239
@@ -57,7 +57,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/__init__.py,sha256=47DE
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/conftest.py,sha256=EenmEdBtHUFQ0OS-kE7Vboax3JnDdj-K4Qixt5oR0Po,2253
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_adornments_serialization.py,sha256=KYdohS5pRgHM0DcUaK0tHa40f0gSvDKi2K5On0zNEU8,8305
-vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_attributes_serialization.py,sha256=yPXhdZxEDunNl5LL5Obb0jeO34djt7F-GiaTJhp_fmo,16742
+vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_attributes_serialization.py,sha256=1cszL6N6FNGVm61MOa7AEiHnF0QjZWqDQuPOp4yiG94,18277
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_outputs_serialization.py,sha256=-12ZkZb3f5gyoNASV2yeQtMo5HmNsVEo8nXwL6IC-I8,6261
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_ports_serialization.py,sha256=6th6kCwzql6lddjkTQx4Jbvvs4ChqtJwctW-B4QuBhI,37352
 vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_trigger_serialization.py,sha256=EbVgg_3_ipTt3MOop4RARX0fmNjwqZtkhIXzx9nGw7Y,4487
@@ -67,6 +67,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_error_node_serialization.py,sha256=02gG5C0wHbjzuTgVYCsNrtW1kEOeaM_5zXmznaUKzgk,6079
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_generic_node_serialization.py,sha256=Ah8CxAxAQYnxsNasLB8esN_c9eRDQnmV_aDWC9Kp98s,5746
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_guardrail_node_serialization.py,sha256=0xK9TFqPD5Hy65T-iacZ8pjYHD8XkzZXS35rLwof7uc,7427
+vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_prompt_node_serialization.py,sha256=XRtXTWtNXHe1g8E_mNcAhYZVjc0UcGlJRjq34_sd0jo,787
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_subworkflow_serialization.py,sha256=HEfRYBVwZ6fy0hHhgsgTBEQJJAp_0wsaCJ_OtpZGdqE,20578
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py,sha256=L1SrD5KfZjlAVh2BkOe3pCQOstVf_SiqD0KGoZlknU8,16145
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_merge_node_serialization.py,sha256=ojzdgGb2zGYloOX57V_FyXxjUqOnWMN6z8GUlF8yDMA,8422
@@ -115,7 +116,7 @@ vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
 vellum/client/__init__.py,sha256=j6zi0NZ4BMC6JrwckvzMWuG5x8KoOvO4KqsLhvVCa68,117624
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=u1VLrEPJQZ055tQYEFf0-nEI5ccFS1CB6MysOmwok5g,1869
+vellum/client/core/client_wrapper.py,sha256=cnaRo0Of2VpkokWHVOmmYEpIO-5Jt52DALDty9NxcPs,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -270,7 +271,7 @@ vellum/client/types/error_vellum_value_request.py,sha256=o0aSn34dRcpnAwAfwW_sgwP
 vellum/client/types/execute_api_request_bearer_token.py,sha256=agAhp9lzfzZcYGZdzg2jHAEHCaHlqzbgp6qeeNebcto,183
 vellum/client/types/execute_api_request_body.py,sha256=MArsO_-H41lU8Lz0dB78MVcFupjWtRV7UBEljY3Dnwk,169
 vellum/client/types/execute_api_request_headers_value.py,sha256=bHtGwOpknQDcQo6qtMKqJxaYpvbinDfwx2uaPzyuZ9s,184
-vellum/client/types/execute_api_response.py,sha256=QpUDx-A2tBELMLjxhcHwJyoNm-I0d8QfdCb5f1TAOEE,804
+vellum/client/types/execute_api_response.py,sha256=1_wGY1eIF6Drwx5FEwnwBRLUxonXX7dOjhkvQakE-bw,842
 vellum/client/types/execute_prompt_event.py,sha256=wq_TZBDJcmXQhSj25jR9nMTnN-mP8Ku5Vq3rLqmwE5Q,521
 vellum/client/types/execute_prompt_response.py,sha256=n6ODveXcO8uWG-kr_B9wXziHH8JUaVTUcUAZKifClEo,334
 vellum/client/types/execute_workflow_response.py,sha256=0Q-NGPv5jpxjq6xNlHa3qUNK7yOmkU8h6Z2vQb6bHsU,1022
@@ -1369,11 +1370,11 @@ vellum/workflows/nodes/displayable/bases/tests/test_utils.py,sha256=eqdqbKNRWVMD
 vellum/workflows/nodes/displayable/bases/types.py,sha256=C37B2Qh2YP7s7pUjd-EYKc2Zl1TbnCgI_mENuUSb8bo,1706
 vellum/workflows/nodes/displayable/bases/utils.py,sha256=ckMUenSsNkiYmSw6FmjSMHYaCk8Y8_sUjL6lkFFEqts,5412
 vellum/workflows/nodes/displayable/code_execution_node/__init__.py,sha256=0FLWMMktpzSnmBMizQglBpcPrP80fzVsoJwJgf822Cg,76
-vellum/workflows/nodes/displayable/code_execution_node/node.py,sha256=KZ5d3_mdpsrPF_ScmEqSfBhfup421RscO9hNiGa52T4,9068
+vellum/workflows/nodes/displayable/code_execution_node/node.py,sha256=wgtqPljUqan9SILMysPCdSmZ0HoCpTTTNNaW0y9nQQI,9082
 vellum/workflows/nodes/displayable/code_execution_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/code_execution_node/tests/fixtures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/code_execution_node/tests/fixtures/main.py,sha256=5QsbmkzSlSbcbWTG_JmIqcP-JNJzOPTKxGzdHos19W4,79
-vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py,sha256=zrSgkUGaaTy-cv9h5kLBnao1Aobcn02w39wmcUx7X3I,13224
+vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py,sha256=y_j4PieOpRYFmTqIEg1IPg-x-y_ezOcjcWCWPXYp1hI,14582
 vellum/workflows/nodes/displayable/code_execution_node/utils.py,sha256=hF9tdCpta7WN1ANz467Q9LNlISOSmp79jDIkR5d2iQM,3542
 vellum/workflows/nodes/displayable/conditional_node/__init__.py,sha256=AS_EIqFdU1F9t8aLmbZU-rLh9ry6LCJ0uj0D8F0L5Uw,72
 vellum/workflows/nodes/displayable/conditional_node/node.py,sha256=Qjfl33gZ3JEgxBA1EgzSUebboGvsARthIxxcQyvx5Gg,1152
@@ -1423,7 +1424,7 @@ vellum/workflows/references/environment_variable.py,sha256=-gfOcdYwVp9ztSUYz6h2W
 vellum/workflows/references/execution_count.py,sha256=JILHqt8ELdc9ct-WsVCA5X-rKiP1rmJODw-XTf4kpHI,722
 vellum/workflows/references/external_input.py,sha256=WyBC6uMDu77431YVSU_WvTt-nGLC_bW65tIsplUJXa4,2056
 vellum/workflows/references/input.py,sha256=3INu-TLTi4dziWmva6LO3WvgDlPzsjayUx61cVvqLJA,325
-vellum/workflows/references/lazy.py,sha256=SXwZUCTzUR-R2-uK0XHALtvp1x84l-QkNY-Ds6KynYA,1932
+vellum/workflows/references/lazy.py,sha256=HUQRXQMsCwu0JPVPCDUvQm28q8VlJQ-N8SfFyx2Tap8,2901
 vellum/workflows/references/node.py,sha256=LP854wDVs-9I_aZ7-nkbwXqL2H7W2_3LED2e9FixNS8,1418
 vellum/workflows/references/output.py,sha256=6F4zIrGykDxBsR6qEXxEKncQV5zV4yHafnHRNkxvklc,2780
 vellum/workflows/references/state_value.py,sha256=bInUF0A3Pt4-zhA0f6LdSuyv8tz7n5QRkHAEn4gsmqI,711
@@ -1464,8 +1465,8 @@ vellum/workflows/vellum_client.py,sha256=ODrq_TSl-drX2aezXegf7pizpWDVJuTXH-j6528
 vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
 vellum/workflows/workflows/base.py,sha256=uYT0TQnEDtVaH3pErq785FhxxEEmk7C5ZGfuSO3QK8c,18537
 vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
-vellum_ai-0.13.23.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
-vellum_ai-0.13.23.dist-info/METADATA,sha256=xeS3VmSv44WirfJlZ93OLirCl4GNlureoXjhZfDymC4,5335
-vellum_ai-0.13.23.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-vellum_ai-0.13.23.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
-vellum_ai-0.13.23.dist-info/RECORD,,
+vellum_ai-0.13.25.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.13.25.dist-info/METADATA,sha256=bJbUYjtYgNLNc58zFuoxnvydRfk7utCgEQLmbpbIhJw,5335
+vellum_ai-0.13.25.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.13.25.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.13.25.dist-info/RECORD,,
@@ -343,6 +343,27 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
             return self.serialize_value(display_context, value._value)

         if isinstance(value, LazyReference):
+            if isinstance(value._get, str):
+                reference_parts = value._get.split(".")
+                if len(reference_parts) < 3:
+                    raise Exception(
+                        f"Failed to parse lazy reference: {value._get}. Only Node Output references are supported."
+                    )
+
+                output_name = reference_parts[-1]
+                nested_class_name = reference_parts[-2]
+                if nested_class_name != "Outputs":
+                    raise Exception(
+                        f"Failed to parse lazy reference: {value._get}. Outputs are the only node reference supported."
+                    )
+
+                node_class_name = ".".join(reference_parts[:-2])
+                for node in display_context.node_displays.keys():
+                    if node.__name__ == node_class_name:
+                        return self.serialize_value(display_context, getattr(node.Outputs, output_name))
+
+                raise NotImplementedError(f"Failed to find a LazyReference for: {value._get}")
+
             return self.serialize_value(display_context, value._get())

         if isinstance(value, WorkflowInputReference):
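
Note: the serializer only accepts string references of the form `<NodeClassName>.Outputs.<output_name>`; shorter strings, or references to anything other than `Outputs`, raise. A small sketch of that parsing rule; the helper below is illustrative and not part of the SDK:

```python
def parse_node_output_reference(reference: str) -> tuple:
    # Illustrative only: splits "OtherNode.Outputs.result" into the node class
    # name and the output name, rejecting shapes the serializer does not support.
    parts = reference.split(".")
    if len(parts) < 3 or parts[-2] != "Outputs":
        raise Exception(f"Failed to parse lazy reference: {reference}")
    return ".".join(parts[:-2]), parts[-1]


print(parse_node_output_reference("OtherNode.Outputs.result"))  # ('OtherNode', 'result')
```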
@@ -1,9 +1,10 @@
 from uuid import UUID
-from typing import ClassVar, Dict, Generic, List, Optional, Tuple, Type, TypeVar, Union
+from typing import Callable, ClassVar, Dict, Generic, List, Optional, Tuple, Type, TypeVar, Union

-from vellum import PromptBlock, RichTextChildBlock, VellumVariable
+from vellum import FunctionDefinition, PromptBlock, RichTextChildBlock, VellumVariable
 from vellum.workflows.nodes import InlinePromptNode
 from vellum.workflows.types.core import JsonObject
+from vellum.workflows.utils.functions import compile_function_definition
 from vellum.workflows.utils.uuids import uuid4_from_hash
 from vellum_ee.workflows.display.nodes.base_node_vellum_display import BaseNodeVellumDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
@@ -32,6 +33,17 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],
         _, output_display = display_context.global_node_output_displays[node.Outputs.text]
         _, array_display = display_context.global_node_output_displays[node.Outputs.results]
         node_blocks = raise_if_descriptor(node.blocks)
+        function_definitions = raise_if_descriptor(node.functions)
+
+        blocks: list = [
+            self._generate_prompt_block(block, input_variable_id_by_name, [i]) for i, block in enumerate(node_blocks)
+        ]
+        functions = (
+            [self._generate_function_tools(function) for function in function_definitions]
+            if function_definitions
+            else []
+        )
+        blocks.extend(functions)

         return {
             "id": str(node_id),
@@ -50,10 +62,7 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],
                 "input_variables": [prompt_input.dict() for prompt_input in prompt_inputs],
                 "prompt_template_block_data": {
                     "version": 1,
-                    "blocks": [
-                        self._generate_prompt_block(block, input_variable_id_by_name, [i])
-                        for i, block in enumerate(node_blocks)
-                    ],
+                    "blocks": blocks,
                 },
             },
             "ml_model_name": raise_if_descriptor(node.ml_model),
@@ -91,6 +100,21 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],

         return node_inputs, prompt_inputs

+    def _generate_function_tools(self, function: Union[FunctionDefinition, Callable]) -> JsonObject:
+        normalized_functions = (
+            function if isinstance(function, FunctionDefinition) else compile_function_definition(function)
+        )
+        return {
+            "block_type": "FUNCTION_DEFINITION",
+            "properties": {
+                "function_name": normalized_functions.name,
+                "function_description": normalized_functions.description,
+                "function_parameters": normalized_functions.parameters,
+                "function_forced": normalized_functions.forced,
+                "function_strict": normalized_functions.strict,
+            },
+        }
+
     def _generate_prompt_block(
         self,
         prompt_block: Union[PromptBlock, RichTextChildBlock],
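
Note: functions set on an `InlinePromptNode` are now serialized as `FUNCTION_DEFINITION` blocks appended after the prompt blocks, with plain callables first compiled via `compile_function_definition`. A minimal sketch of a node that would exercise this path; the model name and function are illustrative, and other node configuration is omitted:

```python
from vellum.workflows.nodes import InlinePromptNode


def get_current_weather(city: str) -> str:
    """Illustrative callable; compiled into a FunctionDefinition when serialized."""
    return f"weather for {city}"


class PromptWithFunctions(InlinePromptNode):
    ml_model = "gpt-4o-mini"  # illustrative model name
    blocks = []  # prompt blocks omitted for brevity
    functions = [get_current_weather]  # emitted as a FUNCTION_DEFINITION block
```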
@@ -7,9 +7,12 @@ from vellum.workflows.nodes.bases.base import BaseNode
 from vellum.workflows.references.constant import ConstantValueReference
 from vellum.workflows.references.lazy import LazyReference
 from vellum.workflows.references.vellum_secret import VellumSecretReference
+from vellum.workflows.workflows.base import BaseWorkflow
 from vellum_ee.workflows.display.base import WorkflowInputsDisplay
 from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay
+from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
+from vellum_ee.workflows.display.workflows.vellum_workflow_display import VellumWorkflowDisplay


 class Inputs(BaseInputs):
@@ -151,6 +154,43 @@ def test_serialize_node__lazy_reference(serialize_node):
     )


+def test_serialize_node__lazy_reference_with_string():
+    # GIVEN two nodes with one lazily referencing the other
+    class LazyReferenceGenericNode(BaseNode):
+        attr = LazyReference[str]("OtherNode.Outputs.result")
+
+    class OtherNode(BaseNode):
+        class Outputs(BaseNode.Outputs):
+            result: str
+
+    # AND a workflow with both nodes
+    class Workflow(BaseWorkflow):
+        graph = LazyReferenceGenericNode >> OtherNode
+
+    # WHEN the workflow is serialized
+    workflow_display = get_workflow_display(base_display_class=VellumWorkflowDisplay, workflow_class=Workflow)
+    serialized_workflow: dict = workflow_display.serialize()
+
+    # THEN the node should properly serialize the attribute reference
+    lazy_reference_node = next(
+        node
+        for node in serialized_workflow["workflow_raw_data"]["nodes"]
+        if node["id"] == str(LazyReferenceGenericNode.__id__)
+    )
+
+    assert lazy_reference_node["attributes"] == [
+        {
+            "id": "98833d71-42a8-47e9-81c4-6a35646e3d3c",
+            "name": "attr",
+            "value": {
+                "type": "NODE_OUTPUT",
+                "node_id": str(OtherNode.__id__),
+                "node_output_id": "7a3406a1-6f11-4568-8aa0-e5dba6534dc2",
+            },
+        }
+    ]
+
+
 class WorkflowInputGenericNode(BaseNode):
     attr: str = Inputs.input

@@ -0,0 +1,18 @@
+from vellum_ee.workflows.display.workflows import VellumWorkflowDisplay
+from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
+
+from tests.workflows.basic_inline_prompt_node_with_functions.workflow import BasicInlinePromptWithFunctionsWorkflow
+
+
+def test_serialize_workflow():
+    # WHEN we serialize it
+    workflow_display = get_workflow_display(
+        base_display_class=VellumWorkflowDisplay, workflow_class=BasicInlinePromptWithFunctionsWorkflow
+    )
+    serialized_workflow: dict = workflow_display.serialize()
+    assert (
+        serialized_workflow["workflow_raw_data"]["nodes"][-2]["data"]["exec_config"]["prompt_template_block_data"][
+            "blocks"
+        ][-1]["block_type"]
+        == "FUNCTION_DEFINITION"
+    )