vellum-ai 0.12.5__py3-none-any.whl → 0.12.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ class BaseClientWrapper:
          headers: typing.Dict[str, str] = {
              "X-Fern-Language": "Python",
              "X-Fern-SDK-Name": "vellum-ai",
-             "X-Fern-SDK-Version": "0.12.5",
+             "X-Fern-SDK-Version": "0.12.7",
          }
          headers["X_API_KEY"] = self.api_key
          return headers
@@ -1,6 +1,6 @@
  import json
  from uuid import uuid4
- from typing import ClassVar, Generic, Iterator, List, Optional, Tuple, cast
+ from typing import Callable, ClassVar, Generic, Iterator, List, Optional, Tuple, Union, cast

  from vellum import (
      AdHocExecutePromptEvent,
@@ -24,9 +24,10 @@ from vellum.workflows.exceptions import NodeException
  from vellum.workflows.nodes.displayable.bases.base_prompt_node import BasePromptNode
  from vellum.workflows.nodes.displayable.bases.inline_prompt_node.constants import DEFAULT_PROMPT_PARAMETERS
  from vellum.workflows.types.generics import StateType
+ from vellum.workflows.utils.functions import compile_function_definition


- class BaseInlinePromptNode(BasePromptNode, Generic[StateType]):
+ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
      """
      Used to execute a Prompt defined inline.

@@ -45,7 +46,7 @@ class BaseInlinePromptNode(BasePromptNode, Generic[StateType]):
      blocks: ClassVar[List[PromptBlock]]

      # The functions/tools that a Prompt has access to
-     functions: Optional[List[FunctionDefinition]] = OMIT
+     functions: Optional[List[Union[FunctionDefinition, Callable]]] = None

      parameters: PromptParameters = DEFAULT_PROMPT_PARAMETERS
      expand_meta: Optional[AdHocExpandMeta] = OMIT
@@ -59,6 +60,14 @@ class BaseInlinePromptNode(BasePromptNode, Generic[StateType]):
              "execution_context": {"parent_context": parent_context},
              **request_options.get("additional_body_parameters", {}),
          }
+         normalized_functions = (
+             [
+                 function if isinstance(function, FunctionDefinition) else compile_function_definition(function)
+                 for function in self.functions
+             ]
+             if self.functions
+             else None
+         )

          return self._context.vellum_client.ad_hoc.adhoc_execute_prompt_stream(
              ml_model=self.ml_model,
@@ -66,7 +75,7 @@ class BaseInlinePromptNode(BasePromptNode, Generic[StateType]):
              input_variables=input_variables,
              parameters=self.parameters,
              blocks=self.blocks,
-             functions=self.functions,
+             functions=normalized_functions,
              expand_meta=self.expand_meta,
              request_options=self.request_options,
          )
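With this change, `functions` on `BaseInlinePromptNode` accepts plain Python callables as well as `FunctionDefinition` objects; anything that is not already a `FunctionDefinition` is run through the new `compile_function_definition` helper before the ad-hoc prompt call. A minimal sketch of that normalization, assuming the helper behaves as the new test further down expects; `get_weather` is a hypothetical example function, not part of the package:

from vellum.workflows.utils.functions import compile_function_definition

# Hypothetical callable with annotated parameters.
def get_weather(city: str, units: str) -> None:
    pass

definition = compile_function_definition(get_weather)
# Per the new test below, the result is a FunctionDefinition whose name matches the
# callable and whose `parameters` is a JSON schema derived from the annotations:
#   {"type": "object",
#    "properties": {"city": {"type": "string"}, "units": {"type": "string"}},
#    "required": ["city", "units"]}
print(definition.name, definition.parameters)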
@@ -5,10 +5,14 @@ from typing import Any, Iterator, List
  from vellum.client.core.pydantic_utilities import UniversalBaseModel
  from vellum.client.types.execute_prompt_event import ExecutePromptEvent
  from vellum.client.types.fulfilled_execute_prompt_event import FulfilledExecutePromptEvent
+ from vellum.client.types.function_call import FunctionCall
+ from vellum.client.types.function_call_vellum_value import FunctionCallVellumValue
+ from vellum.client.types.function_definition import FunctionDefinition
  from vellum.client.types.initiated_execute_prompt_event import InitiatedExecutePromptEvent
  from vellum.client.types.prompt_output import PromptOutput
  from vellum.client.types.prompt_request_json_input import PromptRequestJsonInput
  from vellum.client.types.string_vellum_value import StringVellumValue
+ from vellum.workflows.nodes.displayable.bases.inline_prompt_node.node import BaseInlinePromptNode
  from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode


@@ -62,3 +66,54 @@ def test_inline_prompt_node__json_inputs(vellum_adhoc_prompt_client):
          PromptRequestJsonInput(key="a_pydantic", type="JSON", value={"example": "example"}),
      ]
      assert len(mock_api.call_args.kwargs["input_variables"]) == 4
+
+
+ def test_inline_prompt_node__function_definitions(vellum_adhoc_prompt_client):
+     # GIVEN a function definition
+     def my_function(foo: str, bar: int) -> None:
+         pass
+
+     # AND a prompt node accepting that function definition
+     class MyNode(BaseInlinePromptNode):
+         ml_model = "gpt-4o"
+         functions = [my_function]
+         prompt_inputs = {}
+         blocks = []
+
+     # AND a known response from invoking an inline prompt
+     expected_outputs: List[PromptOutput] = [
+         FunctionCallVellumValue(value=FunctionCall(name="my_function", arguments={"foo": "hello", "bar": 1})),
+     ]
+
+     def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[ExecutePromptEvent]:
+         execution_id = str(uuid4())
+         events: List[ExecutePromptEvent] = [
+             InitiatedExecutePromptEvent(execution_id=execution_id),
+             FulfilledExecutePromptEvent(
+                 execution_id=execution_id,
+                 outputs=expected_outputs,
+             ),
+         ]
+         yield from events
+
+     vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+     # WHEN the node is run
+     list(MyNode().run())
+
+     # THEN the prompt is executed with the correct inputs
+     mock_api = vellum_adhoc_prompt_client.adhoc_execute_prompt_stream
+     assert mock_api.call_count == 1
+     assert mock_api.call_args.kwargs["functions"] == [
+         FunctionDefinition(
+             name="my_function",
+             parameters={
+                 "type": "object",
+                 "properties": {
+                     "foo": {"type": "string"},
+                     "bar": {"type": "integer"},
+                 },
+                 "required": ["foo", "bar"],
+             },
+         ),
+     ]
@@ -75,7 +75,7 @@ def test_inline_text_prompt_node__basic(vellum_adhoc_prompt_client):
      vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.assert_called_once_with(
          blocks=[],
          expand_meta=Ellipsis,
-         functions=Ellipsis,
+         functions=None,
          input_values=[],
          input_variables=[],
          ml_model="gpt-4o",
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.12.5
+ Version: 0.12.7
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0
@@ -1,17 +1,17 @@
  vellum_cli/CONTRIBUTING.md,sha256=FtDC7BGxSeMnwCXAUssFsAIElXtmJE-O5Z7BpolcgvI,2935
  vellum_cli/README.md,sha256=2NudRoLzWxNKqnuVy1JuQ7DerIaxWGYkrH8kMd-asIE,90
- vellum_cli/__init__.py,sha256=A9uo9OE7xQACNEtX4k0c-rxypDqS5V8kA8u4BNN0azM,7402
+ vellum_cli/__init__.py,sha256=iims87aL6HbwggCAh8JfLBNf3C7tN3TGDmlg7V5W9Lg,8506
  vellum_cli/aliased_group.py,sha256=ugW498j0yv4ALJ8vS9MsO7ctDW7Jlir9j6nE_uHAP8c,3363
  vellum_cli/config.py,sha256=wJQnv3tCgu1BOugg0AOP94yQ-x1yAg8juX_QoFN9Y7w,5223
  vellum_cli/image_push.py,sha256=SJwhwWJsLjwGNezNVd_oCVpFMfPsAB3dfLWmriZZUtw,4419
  vellum_cli/logger.py,sha256=PuRFa0WCh4sAGFS5aqWB0QIYpS6nBWwPJrIXpWxugV4,1022
- vellum_cli/pull.py,sha256=q68fr1o5H9l8Dvc8BTY1GARJYjAV1i6Fg-Lg4Oo4FDw,6155
+ vellum_cli/pull.py,sha256=AfSQLHwTeq_T22iRbGyefnmrPiddIKb_fqPMSoTlN0Y,6832
  vellum_cli/push.py,sha256=gcYhIogeYejZIhNm5Cjp0VBECaXLmVQEvZjrPH0-TSU,5337
  vellum_cli/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum_cli/tests/conftest.py,sha256=eFGwBxib3Nki830lIFintB0b6r4x8T_KMnmzhlTY5x0,1337
  vellum_cli/tests/test_config.py,sha256=uvKGDc8BoVyT9_H0Z-g8469zVxomn6Oi3Zj-vK7O_wU,2631
  vellum_cli/tests/test_main.py,sha256=qDZG-aQauPwBwM6A2DIu1494n47v3pL28XakTbLGZ-k,272
- vellum_cli/tests/test_pull.py,sha256=P2JFNHU1hE6iydYl7rW35h7c8_DSrLiAS7gNsFMy1JU,17829
+ vellum_cli/tests/test_pull.py,sha256=Bnf21VjfiRb_j495idz5N8afucagtiktNCtVMvU8tGs,18977
  vellum_cli/tests/test_push.py,sha256=V2iGcskh2X3OHj2uV5Vx_BhmtyfmUkyx0lrp8DDOExc,5824
  vellum_ee/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum_ee/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -28,12 +28,12 @@ vellum_ee/workflows/display/nodes/types.py,sha256=St1BB6no528OyELGiyRabWao0GGw6m
  vellum_ee/workflows/display/nodes/utils.py,sha256=sloya5TpXsnot1HURc9L51INwflRqUzHxRVnCS9Cd-4,973
  vellum_ee/workflows/display/nodes/vellum/__init__.py,sha256=nmPLj8vkbVCS46XQqmHq8Xj8Mr36wCK_vWf26A9KIkw,1505
  vellum_ee/workflows/display/nodes/vellum/api_node.py,sha256=4SSQGecKWHuoGy5YIGJeOZVHGKwTs_8Y-gf3GvsHb0M,8506
- vellum_ee/workflows/display/nodes/vellum/code_execution_node.py,sha256=XqizRn5bLwT8LMwgyvfbln8inhCxzTi1EkD22Fx-5-U,4222
+ vellum_ee/workflows/display/nodes/vellum/code_execution_node.py,sha256=qrf2Ne8n2aaoa2xgK3ExCEGI9BA-2g1zmvjSHyzCWmQ,4230
  vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=EtdqJfhYw03PuT2iyJ6mSAZK4RsQqDie_2AnJAtMelk,13625
  vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=ygTjSjYDI4DtkxADWub5rhBnRWItMKWF6fezBrgpOKA,1979
  vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=t5iJQVoRT5g-v2IiUb4kFYdvUVKch0zn27016pzDZoo,2761
  vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=3TJvHX_Uuf_gr94VkYc_zmNH8I5p71ChIeoAbJZ3ddY,2158
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=E7U4B05-KSlPvTp0nYQTtGPkNa5563VxrNBQmKwvTqw,7267
+ vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=DYhxHgFT06CNCpovO4Z-wFPqCjFKxyl89ygFs8AucRQ,7189
  vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=x5wiuWbRjxNcPGu8BoBEKHwPeqCpHE-vrGjAdM5TJOs,4721
  vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=AqUlItgSZij12qRKguKVmDbbaLuDy3Cdom5uOlJPqrc,3640
  vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=jzO63B9KiEAncnBqmz2ZTcxjmEHozMEe7WnfpcpsQYg,3195
@@ -76,7 +76,7 @@ vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
  vellum/client/__init__.py,sha256=o4m7iRZWEV8rP3GkdaztHAjNmjxjWERlarviFoHzuKI,110927
  vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
  vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- vellum/client/core/client_wrapper.py,sha256=wl6o-mN_dvL9IHNiM92SLF4UVlR6ssouBizBfQnMvLw,1868
+ vellum/client/core/client_wrapper.py,sha256=RUr-cX1j7KxfabSh_XadU_QRvmolEu273OpLtcRINs8,1868
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
  vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -1297,7 +1297,7 @@ vellum/workflows/nodes/displayable/bases/base_prompt_node/__init__.py,sha256=Org
  vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=EvylK1rGKpd4iiooEW9O5A9Q8DMTtBwETe_GtQT8M-E,2139
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=Hl35IAoepRpE-j4cALaXVJIYTYOF3qszyVbxTj4kS1s,82
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/constants.py,sha256=fnjiRWLoRlC4Puo5oQcpZD5Hd-EesxsAo9l5tGAkpZQ,270
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=H1AVDnitwIkwya12oV68Qj2tyb786pfRHHz5qxtubD4,5935
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=fypgmZHgaDtGqSBC8rjYiyryJ0H58LPt_CafLfAprO0,6341
  vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=zdpNJoawB5PedsCCfgOGDDoWuif0jNtlV-K9sFL6cNQ,4968
  vellum/workflows/nodes/displayable/bases/search_node.py,sha256=pqiui8G6l_9FLE1HH4rCdFC73Bl7_AIBAmQQMjqe190,3570
  vellum/workflows/nodes/displayable/code_execution_node/__init__.py,sha256=0FLWMMktpzSnmBMizQglBpcPrP80fzVsoJwJgf822Cg,76
@@ -1316,7 +1316,7 @@ vellum/workflows/nodes/displayable/guardrail_node/node.py,sha256=7Ep7Ff7FtFry3Jw
  vellum/workflows/nodes/displayable/inline_prompt_node/__init__.py,sha256=gSUOoEZLlrx35-tQhSAd3An8WDwBqyiQh-sIebLU9wU,74
  vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=dTnP1yH1P0NqMw3noxt9XwaDCpX8ZOhuvVYNAn_DdCQ,2119
  vellum/workflows/nodes/displayable/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=189Oo66QDYJS8vCcyLe9ErJBGpWZVmPePFHta8wzdeM,2615
+ vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=P1DUL0wIG-cyA5dqGv7242cFWJXysmombdujKrJtl7k,4669
  vellum/workflows/nodes/displayable/merge_node/__init__.py,sha256=J8IC08dSH7P76wKlNuxe1sn7toNGtSQdFirUbtPDEs0,60
  vellum/workflows/nodes/displayable/merge_node/node.py,sha256=ZyPvcTgfPOneOm5Dc2kUOoPkwNJqwRPZSj232akXynA,324
  vellum/workflows/nodes/displayable/note_node/__init__.py,sha256=KWA3P4fyYJ-fOTky8qNGlcOotQ-HeHJ9AjZt6mRQmCE,58
@@ -1328,7 +1328,7 @@ vellum/workflows/nodes/displayable/search_node/node.py,sha256=yhFWulbNmSQoDAwtTS
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/__init__.py,sha256=9yYM6001YZeqI1VOk1QuEM_yrffk_EdsO7qaPzINKds,92
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py,sha256=pnbRCgdzWXrXhm5jDkDDASl5xu5w3DxskC34yJVmWUs,7147
  vellum/workflows/nodes/displayable/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py,sha256=lLnXKAUYtgvQ6MpT4GoTrqLtdlyDlUt1pPHrmu-Gf00,4705
+ vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py,sha256=UI_RMmXn9qwB-StnFPvkDd9FctBQAg43wrfouqvPepk,4701
  vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py,sha256=4CMwDtXwTaEvFfDpA6j2iLqc7S6IICSkvVZOobEpeps,6954
  vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=KqKJtJ0vuNoPuUPMdILmBTt4a2fBBxxun-nmOI7T8jo,2585
  vellum/workflows/nodes/utils.py,sha256=EZt7CzJmgQBR_GWFpZr8d-oaoti3tolTd2Cv9wm7dKo,1087
@@ -1384,8 +1384,8 @@ vellum/workflows/vellum_client.py,sha256=ODrq_TSl-drX2aezXegf7pizpWDVJuTXH-j6528
  vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
  vellum/workflows/workflows/base.py,sha256=zpspOEdO5Ye_0ZvN-Wkzv9iQSiF1sD201ba8lhbnPbs,17086
  vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
- vellum_ai-0.12.5.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
- vellum_ai-0.12.5.dist-info/METADATA,sha256=RkJBl93Re8tabpOh2GJyTu7c9lIotTa2y1wqfeEo0yc,5128
- vellum_ai-0.12.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- vellum_ai-0.12.5.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
- vellum_ai-0.12.5.dist-info/RECORD,,
+ vellum_ai-0.12.7.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+ vellum_ai-0.12.7.dist-info/METADATA,sha256=czpJHlepxy2KrSpZireMIgDjpv_QdMBAK-g33oxoI3U,5128
+ vellum_ai-0.12.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ vellum_ai-0.12.7.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+ vellum_ai-0.12.7.dist-info/RECORD,,
vellum_cli/__init__.py CHANGED
@@ -130,11 +130,18 @@ Should only be used for debugging purposes.""",
      is_flag=True,
      help="""Raises an error immediately if there are any issues with the pulling of the Resource.""",
  )
+ @click.option(
+     "--include-sandbox",
+     is_flag=True,
+     help="""Generates a runnable sandbox.py file containing test data from the Resource's sandbox. \
+ Helpful for running and debugging workflows locally.""",
+ )
  def pull(
      ctx: click.Context,
      include_json: Optional[bool],
      exclude_code: Optional[bool],
      strict: Optional[bool],
+     include_sandbox: Optional[bool],
  ) -> None:
      """Pull Resources from Vellum"""

@@ -143,6 +150,7 @@ def pull(
          include_json=include_json,
          exclude_code=exclude_code,
          strict=strict,
+         include_sandbox=include_sandbox,
      )


@@ -171,6 +179,12 @@ Should only be used for debugging purposes.""",
      is_flag=True,
      help="""Raises an error immediately if there are any issues with the pulling of the Workflow.""",
  )
+ @click.option(
+     "--include-sandbox",
+     is_flag=True,
+     help="""Generates a runnable sandbox.py file containing test data from the Resource's sandbox. \
+ Helpful for running and debugging workflows locally.""",
+ )
  def workflows_pull(
      module: Optional[str],
      include_json: Optional[bool],
@@ -178,6 +192,7 @@ def workflows_pull(
      workflow_deployment: Optional[str],
      exclude_code: Optional[bool],
      strict: Optional[bool],
+     include_sandbox: Optional[bool],
  ) -> None:
      """
      Pull Workflows from Vellum. If a module is provided, only the Workflow for that module will be pulled.
@@ -191,6 +206,7 @@ def workflows_pull(
          workflow_deployment=workflow_deployment,
          exclude_code=exclude_code,
          strict=strict,
+         include_sandbox=include_sandbox,
      )


@@ -208,10 +224,23 @@ Should only be used for debugging purposes.""",
      help="""Exclude the code definition of the Resource from the pull response. \
  Should only be used for debugging purposes.""",
  )
+ @click.option(
+     "--strict",
+     is_flag=True,
+     help="""Raises an error immediately if there are any issues with the pulling of the Resource.""",
+ )
+ @click.option(
+     "--include-sandbox",
+     is_flag=True,
+     help="""Generates a runnable sandbox.py file containing test data from the Resource's sandbox. \
+ Helpful for running and debugging resources locally.""",
+ )
  def pull_module(
      ctx: click.Context,
      include_json: Optional[bool],
      exclude_code: Optional[bool],
+     strict: Optional[bool],
+     include_sandbox: Optional[bool],
  ) -> None:
      """Pull a specific module from Vellum"""

@@ -220,6 +249,8 @@ def pull_module(
          module=ctx.parent.invoked_subcommand,
          include_json=include_json,
          exclude_code=exclude_code,
+         strict=strict,
+         include_sandbox=include_sandbox,
      )


vellum_cli/pull.py CHANGED
@@ -84,6 +84,7 @@ def pull_command(
      include_json: Optional[bool] = None,
      exclude_code: Optional[bool] = None,
      strict: Optional[bool] = None,
+     include_sandbox: Optional[bool] = None,
  ) -> None:
      load_dotenv()
      logger = load_cli_logger()
@@ -114,6 +115,8 @@ def pull_command(
          query_parameters["exclude_code"] = exclude_code
      if strict:
          query_parameters["strict"] = strict
+     if include_sandbox:
+         query_parameters["include_sandbox"] = include_sandbox

      response = client.workflows.pull(
          pk,
@@ -166,6 +169,17 @@ def pull_command(
  Its schema should be considered unstable and subject to change at any time."""
      )

+     if include_sandbox:
+         if not workflow_config.ignore:
+             workflow_config.ignore = "sandbox.py"
+             save_lock_file = True
+         elif isinstance(workflow_config.ignore, str) and "sandbox.py" != workflow_config.ignore:
+             workflow_config.ignore = [workflow_config.ignore, "sandbox.py"]
+             save_lock_file = True
+         elif isinstance(workflow_config.ignore, list) and "sandbox.py" not in workflow_config.ignore:
+             workflow_config.ignore.append("sandbox.py")
+             save_lock_file = True
+
      if save_lock_file:
          config.save()

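The new block above guarantees that sandbox.py lands in the workflow config's `ignore` setting whether `ignore` starts out unset, as a single string, or as a list, and it marks the lock file for saving. The same branching, restated as a standalone sketch for clarity; the `add_sandbox_ignore` helper name is ours, not part of the package:

from typing import List, Optional, Union

def add_sandbox_ignore(ignore: Optional[Union[str, List[str]]]) -> Union[str, List[str]]:
    # Return an ignore setting that is guaranteed to cover sandbox.py.
    if not ignore:
        return "sandbox.py"                      # nothing ignored yet -> single string
    if isinstance(ignore, str) and ignore != "sandbox.py":
        return [ignore, "sandbox.py"]            # promote a single string to a list
    if isinstance(ignore, list) and "sandbox.py" not in ignore:
        return ignore + ["sandbox.py"]           # append to an existing list
    return ignore                                # already covered; leave unchanged

assert add_sandbox_ignore(None) == "sandbox.py"
assert add_sandbox_ignore("other.py") == ["other.py", "sandbox.py"]
assert add_sandbox_ignore(["other.py"]) == ["other.py", "sandbox.py"]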
@@ -486,3 +486,32 @@ def test_pull__strict__with_error(vellum_client, mock_module):
      vellum_client.workflows.pull.assert_called_once()
      call_args = vellum_client.workflows.pull.call_args.kwargs
      assert call_args["request_options"]["additional_query_parameters"] == {"strict": True}
+
+
+ def test_pull__include_sandbox(vellum_client, mock_module):
+     # GIVEN a module on the user's filesystem
+     module = mock_module.module
+     temp_dir = mock_module.temp_dir
+
+     # AND the workflow pull API call returns a zip file
+     vellum_client.workflows.pull.return_value = iter(
+         [_zip_file_map({"workflow.py": "print('hello')", "sandbox.py": "print('hello')"})]
+     )
+
+     # WHEN the user runs the pull command
+     runner = CliRunner()
+     result = runner.invoke(cli_main, ["pull", module, "--include-sandbox"])
+
+     # THEN the command returns successfully
+     assert result.exit_code == 0, result.output
+
+     # AND the pull api is called with include_sandbox=True
+     vellum_client.workflows.pull.assert_called_once()
+     call_args = vellum_client.workflows.pull.call_args.kwargs
+     assert call_args["request_options"]["additional_query_parameters"] == {"include_sandbox": True}
+
+     # AND the sandbox.py should be added to the ignore list
+     lock_json = os.path.join(temp_dir, "vellum.lock.json")
+     with open(lock_json) as f:
+         lock_data = json.load(f)
+     assert lock_data["workflows"][0]["ignore"] == "sandbox.py"
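The test above exercises the new flag end to end: the pull API is called with include_sandbox=True and sandbox.py is recorded in the lock file's ignore list. For local experimentation, roughly the same invocation can be driven through Click's test runner; the import path of `cli_main` is an assumption here (it is not shown in this diff), and "my_module" is a placeholder module name:

from click.testing import CliRunner

# Assumed import of the Click entry point; the tests in this diff refer to it as `cli_main`.
from vellum_cli import main as cli_main

runner = CliRunner()
# Pull a module and also generate a runnable sandbox.py from its sandbox data.
result = runner.invoke(cli_main, ["pull", "my_module", "--include-sandbox"])
print(result.exit_code, result.output)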
@@ -27,8 +27,8 @@ class BaseCodeExecutionNodeDisplay(BaseNodeVellumDisplay[_CodeExecutionNodeType]
          node = self._node
          node_id = self.node_id
          raw_code = raise_if_descriptor(node.code)
-         code_value = None

+         code_value: Optional[str]
          if raw_code:
              code_value = raw_code
          else:
@@ -104,13 +104,12 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],
          chat_properties: JsonObject = {
              "chat_role": prompt_block.chat_role,
              "chat_source": prompt_block.chat_source,
+             "chat_message_unterminated": bool(prompt_block.chat_message_unterminated),
              "blocks": [
                  self._generate_prompt_block(block, input_variable_id_by_name, path + [i])
                  for i, block in enumerate(prompt_block.blocks)
              ],
          }
-         if prompt_block.chat_message_unterminated is not None:
-             chat_properties["chat_message_unterminated"] = prompt_block.chat_message_unterminated

          block = {
              "block_type": "CHAT_MESSAGE",