vellum-ai 0.14.51__py3-none-any.whl → 0.14.52__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vellum/client/__init__.py CHANGED
@@ -133,11 +133,15 @@ class Vellum:
133
133
  self._client_wrapper = SyncClientWrapper(
134
134
  environment=environment,
135
135
  api_key=api_key,
136
- httpx_client=httpx_client
137
- if httpx_client is not None
138
- else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
139
- if follow_redirects is not None
140
- else httpx.Client(timeout=_defaulted_timeout),
136
+ httpx_client=(
137
+ httpx_client
138
+ if httpx_client is not None
139
+ else (
140
+ httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
141
+ if follow_redirects is not None
142
+ else httpx.Client(timeout=_defaulted_timeout)
143
+ )
144
+ ),
141
145
  timeout=_defaulted_timeout,
142
146
  )
143
147
  self.ad_hoc = AdHocClient(client_wrapper=self._client_wrapper)
@@ -1442,7 +1446,9 @@ class Vellum:
1442
1446
  method="POST",
1443
1447
  json={
1444
1448
  "actuals": convert_and_respect_annotation_metadata(
1445
- object_=actuals, annotation=typing.Sequence[SubmitWorkflowExecutionActualRequest], direction="write"
1449
+ object_=actuals,
1450
+ annotation=typing.Sequence[SubmitWorkflowExecutionActualRequest],
1451
+ direction="write",
1446
1452
  ),
1447
1453
  "execution_id": execution_id,
1448
1454
  "external_id": external_id,
@@ -1509,11 +1515,15 @@ class AsyncVellum:
1509
1515
  self._client_wrapper = AsyncClientWrapper(
1510
1516
  environment=environment,
1511
1517
  api_key=api_key,
1512
- httpx_client=httpx_client
1513
- if httpx_client is not None
1514
- else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
1515
- if follow_redirects is not None
1516
- else httpx.AsyncClient(timeout=_defaulted_timeout),
1518
+ httpx_client=(
1519
+ httpx_client
1520
+ if httpx_client is not None
1521
+ else (
1522
+ httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
1523
+ if follow_redirects is not None
1524
+ else httpx.AsyncClient(timeout=_defaulted_timeout)
1525
+ )
1526
+ ),
1517
1527
  timeout=_defaulted_timeout,
1518
1528
  )
1519
1529
  self.ad_hoc = AsyncAdHocClient(client_wrapper=self._client_wrapper)
@@ -2906,7 +2916,9 @@ class AsyncVellum:
2906
2916
  method="POST",
2907
2917
  json={
2908
2918
  "actuals": convert_and_respect_annotation_metadata(
2909
- object_=actuals, annotation=typing.Sequence[SubmitWorkflowExecutionActualRequest], direction="write"
2919
+ object_=actuals,
2920
+ annotation=typing.Sequence[SubmitWorkflowExecutionActualRequest],
2921
+ direction="write",
2910
2922
  ),
2911
2923
  "execution_id": execution_id,
2912
2924
  "external_id": external_id,
@@ -18,7 +18,7 @@ class BaseClientWrapper:
18
18
  headers: typing.Dict[str, str] = {
19
19
  "X-Fern-Language": "Python",
20
20
  "X-Fern-SDK-Name": "vellum-ai",
21
- "X-Fern-SDK-Version": "0.14.51",
21
+ "X-Fern-SDK-Version": "0.14.52",
22
22
  }
23
23
  headers["X-API-KEY"] = self.api_key
24
24
  return headers
@@ -69,8 +69,7 @@ class WorkflowsClient:
69
69
  try:
70
70
  if 200 <= _response.status_code < 300:
71
71
  _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None
72
- for _chunk in _response.iter_bytes(chunk_size=_chunk_size):
73
- yield _chunk
72
+ yield from _response.iter_bytes(chunk_size=_chunk_size)
74
73
  return
75
74
  _response.read()
76
75
  if _response.status_code == 400:
@@ -8,8 +8,8 @@ import pydantic
8
8
 
9
9
 
10
10
  class MlModelUsageWrapper(UniversalBaseModel):
11
- ml_model_usage: MlModelUsage
12
11
  ml_model_name: str
12
+ ml_model_usage: MlModelUsage
13
13
 
14
14
  if IS_PYDANTIC_V2:
15
15
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
@@ -194,7 +194,7 @@ def test_map_node_parallel_execution_with_workflow():
194
194
  thread_ids[self.item] = current_thread_id
195
195
 
196
196
  # Simulate work
197
- time.sleep(0.01)
197
+ time.sleep(0.1)
198
198
 
199
199
  end = time.time()
200
200
  end_str = datetime.datetime.fromtimestamp(end).strftime("%Y-%m-%d %H:%M:%S.%f")
@@ -18,6 +18,7 @@ from vellum import (
18
18
  )
19
19
  from vellum.client import ApiError, RequestOptions
20
20
  from vellum.client.types.chat_message_request import ChatMessageRequest
21
+ from vellum.client.types.prompt_exec_config import PromptExecConfig
21
22
  from vellum.client.types.prompt_settings import PromptSettings
22
23
  from vellum.client.types.rich_text_child_block import RichTextChildBlock
23
24
  from vellum.workflows.context import get_execution_context
@@ -134,6 +135,23 @@ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
134
135
  )
135
136
 
136
137
  def _process_prompt_event_stream(self) -> Generator[BaseOutput, None, Optional[List[PromptOutput]]]:
138
+ try:
139
+ # Compile dict blocks into PromptBlocks
140
+ exec_config = PromptExecConfig.model_validate(
141
+ {
142
+ "ml_model": "",
143
+ "input_variables": [],
144
+ "parameters": {},
145
+ "blocks": self.blocks,
146
+ }
147
+ )
148
+ self.blocks = exec_config.blocks # type: ignore
149
+ except Exception:
150
+ raise NodeException(
151
+ message="Failed to compile blocks",
152
+ code=WorkflowErrorCode.INVALID_INPUTS,
153
+ )
154
+
137
155
  self._validate()
138
156
  try:
139
157
  prompt_event_stream = self._get_prompt_event_stream()
@@ -473,3 +473,149 @@ def test_inline_prompt_node__json_output_with_streaming_disabled(vellum_adhoc_pr
473
473
  request_options=mock.ANY,
474
474
  settings=PromptSettings(stream_enabled=False),
475
475
  )
476
+
477
+
478
+ def test_inline_prompt_node__dict_blocks(vellum_adhoc_prompt_client):
479
+ # GIVEN a node that has dict blocks
480
+ class MyInlinePromptNode(InlinePromptNode):
481
+ ml_model = "gpt-4o"
482
+ blocks = [
483
+ { # type: ignore
484
+ "state": None,
485
+ "blocks": [
486
+ {
487
+ "state": None,
488
+ "blocks": [
489
+ {
490
+ "text": "You are a weather expert",
491
+ "state": None,
492
+ "block_type": "PLAIN_TEXT",
493
+ "cache_config": None,
494
+ }
495
+ ],
496
+ "block_type": "RICH_TEXT",
497
+ "cache_config": None,
498
+ }
499
+ ],
500
+ "chat_role": "SYSTEM",
501
+ "block_type": "CHAT_MESSAGE",
502
+ "chat_source": None,
503
+ "cache_config": None,
504
+ "chat_message_unterminated": None,
505
+ },
506
+ { # type: ignore
507
+ "state": None,
508
+ "blocks": [
509
+ {
510
+ "state": None,
511
+ "blocks": [
512
+ {
513
+ "state": None,
514
+ "block_type": "VARIABLE",
515
+ "cache_config": None,
516
+ "input_variable": "question",
517
+ }
518
+ ],
519
+ "block_type": "RICH_TEXT",
520
+ "cache_config": None,
521
+ }
522
+ ],
523
+ "chat_role": "USER",
524
+ "block_type": "CHAT_MESSAGE",
525
+ "chat_source": None,
526
+ "cache_config": None,
527
+ "chat_message_unterminated": None,
528
+ },
529
+ VariablePromptBlock(block_type="VARIABLE", state=None, cache_config=None, input_variable="chat_history"),
530
+ ]
531
+ prompt_inputs = {
532
+ "question": "What is the weather in Tokyo?",
533
+ "chat_history": "You are a weather expert",
534
+ }
535
+ settings = PromptSettings(stream_enabled=False)
536
+
537
+ # AND a known JSON response from invoking an inline prompt
538
+ expected_json = {"result": "Hello, world!"}
539
+ expected_outputs: List[PromptOutput] = [
540
+ StringVellumValue(value=json.dumps(expected_json)),
541
+ ]
542
+
543
+ def generate_prompt_event(*args: Any, **kwargs: Any) -> AdHocExecutePromptEvent:
544
+ execution_id = str(uuid4())
545
+ return FulfilledAdHocExecutePromptEvent(
546
+ execution_id=execution_id,
547
+ outputs=expected_outputs,
548
+ )
549
+
550
+ vellum_adhoc_prompt_client.adhoc_execute_prompt.side_effect = generate_prompt_event
551
+
552
+ # WHEN the node is run
553
+ node = MyInlinePromptNode()
554
+ outputs = [o for o in node.run()]
555
+
556
+ # THEN the node should have produced the outputs we expect
557
+ results_output = outputs[0]
558
+ assert results_output.name == "results"
559
+ assert results_output.value == expected_outputs
560
+
561
+ text_output = outputs[1]
562
+ assert text_output.name == "text"
563
+ assert text_output.value == '{"result": "Hello, world!"}'
564
+
565
+
566
+ def test_inline_prompt_node__dict_blocks_error(vellum_adhoc_prompt_client):
567
+ # GIVEN a node that has an error (wrong block type)
568
+ class MyInlinePromptNode(InlinePromptNode):
569
+ ml_model = "gpt-4o"
570
+ blocks = [
571
+ { # type: ignore
572
+ "state": None,
573
+ "blocks": [
574
+ {
575
+ "state": None,
576
+ "blocks": [
577
+ {
578
+ "text": "You are a weather expert",
579
+ "state": None,
580
+ "block_type": "PLAIN_TEXT",
581
+ "cache_config": None,
582
+ }
583
+ ],
584
+ "block_type": "WRONG_BLOCK_TYPE",
585
+ "cache_config": None,
586
+ }
587
+ ],
588
+ "chat_role": "SYSTEM",
589
+ "block_type": "CHAT_MESSAGE",
590
+ "chat_source": None,
591
+ "cache_config": None,
592
+ "chat_message_unterminated": None,
593
+ },
594
+ ]
595
+ prompt_inputs = {
596
+ "question": "What is the weather in Tokyo?",
597
+ }
598
+ settings = PromptSettings(stream_enabled=False)
599
+
600
+ # AND a known JSON response from invoking an inline prompt
601
+ expected_json = {"result": "Hello, world!"}
602
+ expected_outputs: List[PromptOutput] = [
603
+ StringVellumValue(value=json.dumps(expected_json)),
604
+ ]
605
+
606
+ def generate_prompt_event(*args: Any, **kwargs: Any) -> AdHocExecutePromptEvent:
607
+ execution_id = str(uuid4())
608
+ return FulfilledAdHocExecutePromptEvent(
609
+ execution_id=execution_id,
610
+ outputs=expected_outputs,
611
+ )
612
+
613
+ vellum_adhoc_prompt_client.adhoc_execute_prompt.side_effect = generate_prompt_event
614
+
615
+ node = MyInlinePromptNode()
616
+ with pytest.raises(NodeException) as excinfo:
617
+ list(node.run())
618
+
619
+ # THEN the node should raise the correct NodeException
620
+ assert excinfo.value.code == WorkflowErrorCode.INVALID_INPUTS
621
+ assert "Failed to compile blocks" == str(excinfo.value)
@@ -1,6 +1,8 @@
1
+ import json
1
2
  from typing import ClassVar
2
3
 
3
4
  from vellum.workflows.nodes.displayable.bases import BaseSearchNode as BaseSearchNode
5
+ from vellum.workflows.state.encoder import DefaultStateEncoder
4
6
  from vellum.workflows.types import MergeBehavior
5
7
  from vellum.workflows.types.generics import StateType
6
8
 
@@ -33,6 +35,9 @@ class SearchNode(BaseSearchNode[StateType]):
33
35
  text: str
34
36
 
35
37
  def run(self) -> Outputs:
38
+ if not isinstance(self.query, str):
39
+ self.query = json.dumps(self.query, cls=DefaultStateEncoder)
40
+
36
41
  results = self._perform_search().results
37
42
  text = self.chunk_separator.join([r.text for r in results])
38
43
  return self.Outputs(results=results, text=text)
@@ -1,4 +1,7 @@
1
+ import json
2
+
1
3
  from vellum import SearchResponse, SearchResult, SearchResultDocument
4
+ from vellum.client.types.chat_message import ChatMessage
2
5
  from vellum.client.types.json_vellum_value_request import JsonVellumValueRequest
3
6
  from vellum.client.types.search_filters_request import SearchFiltersRequest
4
7
  from vellum.client.types.search_request_options_request import SearchRequestOptionsRequest
@@ -13,6 +16,7 @@ from vellum.workflows.nodes.displayable.bases.types import (
13
16
  SearchFilters,
14
17
  )
15
18
  from vellum.workflows.nodes.displayable.search_node.node import SearchNode
19
+ from vellum.workflows.state.base import BaseState
16
20
 
17
21
 
18
22
  def test_run_workflow__happy_path(vellum_client):
@@ -172,3 +176,42 @@ def test_run_workflow__happy_path__options_attribute(vellum_client):
172
176
  ),
173
177
  ),
174
178
  )
179
+
180
+
181
+ def test_run_workflow__chat_history_as_query(vellum_client):
182
+ """
183
+ Confirm that we can successfully invoke a Search node with a chat history as the query param,
184
+ backwards compatible with original workflows
185
+ """
186
+
187
+ # GIVEN a state definition with a chat history
188
+ class MyState(BaseState):
189
+ chat_history: list[ChatMessage]
190
+
191
+ # AND a Search Node that uses the chat history as the query param
192
+ class MySearchNode(SearchNode[MyState]):
193
+ query = MyState.chat_history # type: ignore[assignment]
194
+ document_index = "document_index"
195
+ limit = 1
196
+
197
+ # AND a Search request that will return a 200 ok resposne
198
+ search_response = SearchResponse(
199
+ results=[
200
+ SearchResult(
201
+ text="Search query", score="0.0", keywords=["keywords"], document=SearchResultDocument(label="label")
202
+ )
203
+ ]
204
+ )
205
+
206
+ vellum_client.search.return_value = search_response
207
+
208
+ # WHEN we run the workflow
209
+ outputs = MySearchNode(state=MyState(chat_history=[ChatMessage(role="USER", text="Hello, world!")])).run()
210
+
211
+ # THEN the workflow should have completed successfully
212
+ assert outputs.text == "Search query"
213
+
214
+ # AND the options should be as expected
215
+ assert json.loads(vellum_client.search.call_args.kwargs["query"]) == [
216
+ {"role": "USER", "text": "Hello, world!", "source": None, "content": None}
217
+ ]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: vellum-ai
3
- Version: 0.14.51
3
+ Version: 0.14.52
4
4
  Summary:
5
5
  License: MIT
6
6
  Requires-Python: >=3.9,<4.0
@@ -1,14 +1,14 @@
1
1
  vellum_cli/CONTRIBUTING.md,sha256=FtDC7BGxSeMnwCXAUssFsAIElXtmJE-O5Z7BpolcgvI,2935
2
2
  vellum_cli/README.md,sha256=2NudRoLzWxNKqnuVy1JuQ7DerIaxWGYkrH8kMd-asIE,90
3
- vellum_cli/__init__.py,sha256=sCNP_hmFCexEWp1oQdpj8QsIUiAbo4MIlTalZEFruD8,12398
3
+ vellum_cli/__init__.py,sha256=2_6oGoVcLFUh4L63Kz4SBL4Y6XevJ70oYbg7BJ3cb5Q,12569
4
4
  vellum_cli/aliased_group.py,sha256=ugW498j0yv4ALJ8vS9MsO7ctDW7Jlir9j6nE_uHAP8c,3363
5
5
  vellum_cli/config.py,sha256=v5BmZ-t_v4Jmqd7KVuQMZF2pRI-rbMspSkVYXIRoTmI,9448
6
6
  vellum_cli/image_push.py,sha256=skFXf25ixMOX1yfcyAtii-RivYYv-_hsv-Z-bVB6m5Q,7380
7
7
  vellum_cli/init.py,sha256=WpnMXPItPmh0f0bBGIer3p-e5gu8DUGwSArT_FuoMEw,5093
8
8
  vellum_cli/logger.py,sha256=PuRFa0WCh4sAGFS5aqWB0QIYpS6nBWwPJrIXpWxugV4,1022
9
9
  vellum_cli/ping.py,sha256=p_BCCRjgPhng6JktuECtkDQLbhopt6JpmrtGoLnLJT8,1161
10
- vellum_cli/pull.py,sha256=2hSJGeqooevMb--mcvRLQ1GYT-9290cI7VdSRifzmTg,12561
11
- vellum_cli/push.py,sha256=nWHLDi_w0LXycNkVv00CiNwY469BcTNBn7NphWpCA7E,9711
10
+ vellum_cli/pull.py,sha256=M50yXzA_35N35gk1Y8KjLbXrzdRG86--XFQvEukxGtA,13371
11
+ vellum_cli/push.py,sha256=9oYmYhIWln3U0g7AstWEOA6ng5W_RthUA-Fie8FalFE,9846
12
12
  vellum_cli/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
13
  vellum_cli/tests/conftest.py,sha256=AFYZryKA2qnUuCPBxBKmHLFoPiE0WhBFFej9tNwSHdc,1526
14
14
  vellum_cli/tests/test_config.py,sha256=uvKGDc8BoVyT9_H0Z-g8469zVxomn6Oi3Zj-vK7O_wU,2631
@@ -16,8 +16,8 @@ vellum_cli/tests/test_image_push.py,sha256=QM-JlR_aJappvwbCLteQZZf76sd7SE1sRj3ar
16
16
  vellum_cli/tests/test_init.py,sha256=8UOc_ThfouR4ja5cCl_URuLk7ohr9JXfCnG4yka1OUQ,18754
17
17
  vellum_cli/tests/test_main.py,sha256=qDZG-aQauPwBwM6A2DIu1494n47v3pL28XakTbLGZ-k,272
18
18
  vellum_cli/tests/test_ping.py,sha256=3ucVRThEmTadlV9LrJdCCrr1Ofj3rOjG6ue0BNR2UC0,2523
19
- vellum_cli/tests/test_pull.py,sha256=iTxVbJGuehvgNt8Vp9W3Y5Bvaocfws8bl8LMGEbc_qQ,47508
20
- vellum_cli/tests/test_push.py,sha256=uNMmPG9Z0uRN6xYYMzBGZaCXJZkAHWcbcSJRJ4eQk70,31182
19
+ vellum_cli/tests/test_pull.py,sha256=7HRAhIdkVW5mR2VckEaNDjp4rt-MlIxOWMMI2XNUPE8,49814
20
+ vellum_cli/tests/test_push.py,sha256=K-TaOjU4mc-x0-ee1DNXT7yZBC0pEM-R9VY57kdMdmY,32849
21
21
  vellum_ee/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
22
  vellum_ee/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
23
  vellum_ee/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -55,7 +55,7 @@ vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py,sha25
55
55
  vellum_ee/workflows/display/nodes/vellum/tests/test_error_node.py,sha256=540FoWMpJ3EN_DPjHsr9ODJWCRVcUa5hZBn-5T2GiHU,1665
56
56
  vellum_ee/workflows/display/nodes/vellum/tests/test_note_node.py,sha256=uiMB0cOxKZzos7YKnj4ef4DFa2bOvZJWIv-hfbUV6Go,1218
57
57
  vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_deployment_node.py,sha256=G-qJyTNJkpqJiEZ3kCJl86CXJINLeFyf2lM0bQHCCOs,3822
58
- vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=9bNpdCBUSLTUmCh04Z-kgXxJ5dKWFJ53V6xrQMEVxyU,9942
58
+ vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=7GGbGhcaXkWqLoOU9dWWKFnjVVE_dId9vcKFYzpIHKg,9945
59
59
  vellum_ee/workflows/display/nodes/vellum/tests/test_retry_node.py,sha256=h93ysolmbo2viisyhRnXKHPxiDK0I_dSAbYoHFYIoO4,1953
60
60
  vellum_ee/workflows/display/nodes/vellum/tests/test_subworkflow_deployment_node.py,sha256=BUzHJgjdWnPeZxjFjHfDBKnbFjYjnbXPjc-1hne1B2Y,3965
61
61
  vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py,sha256=LSk2gx9TpGXbAqKe8dggQW8yJZqj-Cf0EGJFeGGlEcw,3321
@@ -95,7 +95,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_n
95
95
  vellum_ee/workflows/display/types.py,sha256=i4T7ElU5b5h-nA1i3scmEhO1BqmNDc4eJDHavATD88w,2821
96
96
  vellum_ee/workflows/display/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
97
97
  vellum_ee/workflows/display/utils/exceptions.py,sha256=LSwwxCYNxFkf5XMUcFkaZKpQ13OSrI7y_bpEUwbKVk0,169
98
- vellum_ee/workflows/display/utils/expressions.py,sha256=qsKRgxm9zKFgAgjc9LqKEWP1rtdzXA1NDsXu9kyhf60,12416
98
+ vellum_ee/workflows/display/utils/expressions.py,sha256=8rwEsT99kSiCemIR7oFjlmphHoNqE5-7Fft5NdkWJ98,12419
99
99
  vellum_ee/workflows/display/utils/registry.py,sha256=fWIm5Jj-10gNFjgn34iBu4RWv3Vd15ijtSN0V97bpW8,1513
100
100
  vellum_ee/workflows/display/utils/vellum.py,sha256=mtoXmSYwR7rvrq-d6CzCW_auaJXTct0Mi1F0xpRCiNQ,5627
101
101
  vellum_ee/workflows/display/vellum.py,sha256=o7mq_vk2Yapu9DDKRz5l76h8EmCAypWGQYe6pryrbB8,3576
@@ -130,10 +130,10 @@ vellum_ee/workflows/tests/test_server.py,sha256=SsOkS6sGO7uGC4mxvk4iv8AtcXs058P9
130
130
  vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
131
131
  vellum/__init__.py,sha256=Hqfl49WZJzzqOKzVsTGi-j9twIqFOoRmACJsrEsjL44,41918
132
132
  vellum/client/README.md,sha256=qmaVIP42MnxAu8jV7u-CsgVFfs3-pHQODrXdZdFxtaw,4749
133
- vellum/client/__init__.py,sha256=PEnFl7LbXQcvAi3bVN2qyt5xm2FtVtq7xWKkcWM3Tg4,120166
133
+ vellum/client/__init__.py,sha256=nv_MItkRFOTsTDcray01bea7NvO-P9bAj8lnUfTbxOo,120440
134
134
  vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
135
135
  vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
136
- vellum/client/core/client_wrapper.py,sha256=0O1XpnpyuPVD86JFBVwkbpWYMkAmrvehoYpQg2THQRM,1869
136
+ vellum/client/core/client_wrapper.py,sha256=P667a77GUeHTNshBFAxTS1VkSHNO_joyM4HtqkCS-8o,1869
137
137
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
138
138
  vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
139
139
  vellum/client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -196,7 +196,7 @@ vellum/client/resources/workflow_sandboxes/client.py,sha256=XfMcbvSTF1_iTGIXsk1F
196
196
  vellum/client/resources/workflow_sandboxes/types/__init__.py,sha256=EaGVRU1w6kJiiHrbZOeEa0c3ggjfgv_jBqsyOkCRWOI,212
197
197
  vellum/client/resources/workflow_sandboxes/types/list_workflow_sandbox_examples_request_tag.py,sha256=TEwWit20W3X-zWPPLAhmUG05UudG9gaBSJ4Q4-rNJws,188
198
198
  vellum/client/resources/workflows/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
199
- vellum/client/resources/workflows/client.py,sha256=uDC61aybVmgxPiLKuLpAB-fK3sagnFFX06zzmQngInA,11285
199
+ vellum/client/resources/workflows/client.py,sha256=OwpMojUEZ6DdtqW5Q-165SCthYFbzt3IBVKHR5-4h-0,11244
200
200
  vellum/client/resources/workspace_secrets/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
201
201
  vellum/client/resources/workspace_secrets/client.py,sha256=zlBdbeTP6sqvtyl_DlrpfG-W5hSP7tJ1NYLSygi4CLU,8205
202
202
  vellum/client/resources/workspaces/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -404,7 +404,7 @@ vellum/client/types/metric_definition_input.py,sha256=4nmwpPqbeNQYCzLkXCkc-FGV5K
404
404
  vellum/client/types/metric_node_result.py,sha256=YdKq1DZiBD1RBtjyMejImylv3BqrwY8B_UF4Ij-6_64,660
405
405
  vellum/client/types/ml_model_read.py,sha256=Vr5KjaS2Tca0GXsltfSYQpuyGYpgIahPEFfS6HfFGSo,706
406
406
  vellum/client/types/ml_model_usage.py,sha256=WcZ2F1hfxyTwe-spOVwv-qJYDjs4hf9sn7BF2abawPo,910
407
- vellum/client/types/ml_model_usage_wrapper.py,sha256=K0V5O-NqB12FAKUcBb9r3b25u4x-4jFcef2-V1PQhMU,645
407
+ vellum/client/types/ml_model_usage_wrapper.py,sha256=Vi7urVmTn1E_aZV6TxnW-qjDayRv7A_6JDk84KqAIa0,645
408
408
  vellum/client/types/named_scenario_input_chat_history_variable_value_request.py,sha256=aVZmAxu-47c34NyhSkfi9tQqIPy29cdJ7Pb4MIgKeNw,862
409
409
  vellum/client/types/named_scenario_input_json_variable_value_request.py,sha256=UgnKv70zFviv1kl4nM7aM7IFA-7xyDOtglW4Y3GBZ28,757
410
410
  vellum/client/types/named_scenario_input_request.py,sha256=Pi8l377OHvKBwvPu9slZ1omf_NJ9S1mCQ5Wr-Ux5KVg,611
@@ -1558,7 +1558,7 @@ vellum/workflows/nodes/core/inline_subworkflow_node/tests/test_node.py,sha256=kU
1558
1558
  vellum/workflows/nodes/core/map_node/__init__.py,sha256=MXpZYmGfhsMJHqqlpd64WiJRtbAtAMQz-_3fCU_cLV0,56
1559
1559
  vellum/workflows/nodes/core/map_node/node.py,sha256=rbF7fLAU0vUDEpgtWqeQTZFlhWOhJw38tgxWJ6exud8,9313
1560
1560
  vellum/workflows/nodes/core/map_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1561
- vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=f3lSPYAU1vJUCLCujNOo0EAeBbOM9hnY5A1Wy58korc,6905
1561
+ vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=rf7CCDtjHxoPKeEtm9a8v_MNvkvu5UThH4xRXYrdEl8,6904
1562
1562
  vellum/workflows/nodes/core/retry_node/__init__.py,sha256=lN2bIy5a3Uzhs_FYCrooADyYU6ZGShtvLKFWpelwPvo,60
1563
1563
  vellum/workflows/nodes/core/retry_node/node.py,sha256=abtGvinLfi1tKqYIsWQKZtBUisF2Qw2yT1YoPw9cVk4,5297
1564
1564
  vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1582,9 +1582,9 @@ vellum/workflows/nodes/displayable/bases/base_prompt_node/__init__.py,sha256=Org
1582
1582
  vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=amBXi7Tv50AbGLhfWbwX83PlOdV1XyYRyQmpa6_afE4,3511
1583
1583
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=Hl35IAoepRpE-j4cALaXVJIYTYOF3qszyVbxTj4kS1s,82
1584
1584
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/constants.py,sha256=fnjiRWLoRlC4Puo5oQcpZD5Hd-EesxsAo9l5tGAkpZQ,270
1585
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=eG_buxb4DlgvBzewseQDiUu7Vc2uaoOariVVsWt1068,10579
1585
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=87Z4n1gsI3LMm0C4TaJ7nfykY8zKaKigOg7Da65E8YQ,11223
1586
1586
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1587
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=inTS8OyGe_62rV4S77HwhqhlTAeJgZlqieeGhdK_ecs,16030
1587
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=5CNag1_aEFZbCL0nrOC5e1L-t90-4rp2xDwh0h52hVI,21407
1588
1588
  vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=T99UWACTD9ytVDVHa6W2go00V7HNwDxOyBFyMM2GnhQ,9567
1589
1589
  vellum/workflows/nodes/displayable/bases/search_node.py,sha256=3UtbqY3QO4kzfJHbmUNZGnEEfJmaoiF892u8H6TGjp8,5381
1590
1590
  vellum/workflows/nodes/displayable/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1623,9 +1623,9 @@ vellum/workflows/nodes/displayable/prompt_deployment_node/node.py,sha256=eUiQYdq
1623
1623
  vellum/workflows/nodes/displayable/prompt_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1624
1624
  vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py,sha256=c_nuuqrwiIjgj4qIbVypfDuOc-3TlgO6CbXFqQl2Nqw,19725
1625
1625
  vellum/workflows/nodes/displayable/search_node/__init__.py,sha256=hpBpvbrDYf43DElRZFLzieSn8weXiwNiiNOJurERQbs,62
1626
- vellum/workflows/nodes/displayable/search_node/node.py,sha256=_VHHuTNN4icZBgc7O5U9SVKrv1zgKipU72fOtxTyrQU,1453
1626
+ vellum/workflows/nodes/displayable/search_node/node.py,sha256=vUTDyurYKw6KLABuVml_N_fbnNBDv5dBtejdoj82hWs,1646
1627
1627
  vellum/workflows/nodes/displayable/search_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1628
- vellum/workflows/nodes/displayable/search_node/tests/test_node.py,sha256=2-QCV7Vk_-YMls33p0GOUtCv3f2uPNZCjkB2CRjek7o,6562
1628
+ vellum/workflows/nodes/displayable/search_node/tests/test_node.py,sha256=OserVd6jPe6t49MQF0cxphI2irBLaC_GceMr0acFqoY,8075
1629
1629
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/__init__.py,sha256=9yYM6001YZeqI1VOk1QuEM_yrffk_EdsO7qaPzINKds,92
1630
1630
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py,sha256=biv1H4gIX4B4VMFJ3Rp82NjE65GhmzLq7pREL0ozB2E,9484
1631
1631
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1707,8 +1707,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
1707
1707
  vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1708
1708
  vellum/workflows/workflows/tests/test_base_workflow.py,sha256=8P5YIsNMO78_CR1NNK6wkEdkMB4b3Q_Ni1qxh78OnHo,20481
1709
1709
  vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
1710
- vellum_ai-0.14.51.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
1711
- vellum_ai-0.14.51.dist-info/METADATA,sha256=ngfPkauTzHEOvea_irMQiqBgZSWEPIppPpKxA3VmlA0,5484
1712
- vellum_ai-0.14.51.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
1713
- vellum_ai-0.14.51.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
1714
- vellum_ai-0.14.51.dist-info/RECORD,,
1710
+ vellum_ai-0.14.52.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
1711
+ vellum_ai-0.14.52.dist-info/METADATA,sha256=-vGzZDBmw_wd9r-qwKB7WAO8eJvsTB0_OGcEFjPwVU0,5484
1712
+ vellum_ai-0.14.52.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
1713
+ vellum_ai-0.14.52.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
1714
+ vellum_ai-0.14.52.dist-info/RECORD,,
vellum_cli/__init__.py CHANGED
@@ -259,6 +259,11 @@ Helpful for running and debugging workflows locally.""",
259
259
  help="""Directory to pull the workflow into. If not specified, \
260
260
  the workflow will be pulled into the current working directory.""",
261
261
  )
262
+ @click.option(
263
+ "--workspace",
264
+ type=str,
265
+ help="The specific Workspace config to use when pulling",
266
+ )
262
267
  def workflows_pull(
263
268
  module: Optional[str],
264
269
  include_json: Optional[bool],
@@ -268,6 +273,7 @@ def workflows_pull(
268
273
  strict: Optional[bool],
269
274
  include_sandbox: Optional[bool],
270
275
  target_directory: Optional[str],
276
+ workspace: Optional[str],
271
277
  ) -> None:
272
278
  """
273
279
  Pull Workflows from Vellum. If a module is provided, only the Workflow for that module will be pulled.
@@ -283,6 +289,7 @@ def workflows_pull(
283
289
  strict=strict,
284
290
  include_sandbox=include_sandbox,
285
291
  target_directory=target_directory,
292
+ workspace=workspace,
286
293
  )
287
294
 
288
295
 
vellum_cli/pull.py CHANGED
@@ -13,7 +13,13 @@ from vellum.client.core.api_error import ApiError
13
13
  from vellum.client.core.pydantic_utilities import UniversalBaseModel
14
14
  from vellum.utils.uuid import is_valid_uuid
15
15
  from vellum.workflows.vellum_client import create_vellum_client
16
- from vellum_cli.config import VellumCliConfig, WorkflowConfig, WorkflowDeploymentConfig, load_vellum_cli_config
16
+ from vellum_cli.config import (
17
+ DEFAULT_WORKSPACE_CONFIG,
18
+ VellumCliConfig,
19
+ WorkflowConfig,
20
+ WorkflowDeploymentConfig,
21
+ load_vellum_cli_config,
22
+ )
17
23
  from vellum_cli.logger import load_cli_logger
18
24
 
19
25
  ERROR_LOG_FILE_NAME = "error.log"
@@ -43,6 +49,7 @@ def _resolve_workflow_config(
43
49
  module: Optional[str] = None,
44
50
  workflow_sandbox_id: Optional[str] = None,
45
51
  workflow_deployment: Optional[str] = None,
52
+ workspace: Optional[str] = None,
46
53
  ) -> WorkflowConfigResolutionResult:
47
54
  if workflow_sandbox_id and workflow_deployment:
48
55
  raise ValueError("Cannot specify both workflow_sandbox_id and workflow_deployment")
@@ -53,6 +60,7 @@ def _resolve_workflow_config(
53
60
  workflow_config = WorkflowConfig(
54
61
  workflow_sandbox_id=workflow_sandbox_id,
55
62
  module=module,
63
+ workspace=workspace or DEFAULT_WORKSPACE_CONFIG.name,
56
64
  )
57
65
  config.workflows.append(workflow_config)
58
66
  return WorkflowConfigResolutionResult(
@@ -132,8 +140,9 @@ def pull_command(
132
140
  strict: Optional[bool] = None,
133
141
  include_sandbox: Optional[bool] = None,
134
142
  target_directory: Optional[str] = None,
143
+ workspace: Optional[str] = None,
135
144
  ) -> None:
136
- load_dotenv()
145
+ load_dotenv(dotenv_path=os.path.join(os.getcwd(), ".env"))
137
146
  logger = load_cli_logger()
138
147
  config = load_vellum_cli_config()
139
148
 
@@ -142,6 +151,7 @@ def pull_command(
142
151
  module=module,
143
152
  workflow_sandbox_id=workflow_sandbox_id,
144
153
  workflow_deployment=workflow_deployment,
154
+ workspace=workspace,
145
155
  )
146
156
 
147
157
  workflow_config = workflow_config_result.workflow_config
@@ -157,7 +167,20 @@ def pull_command(
157
167
  else:
158
168
  logger.info(f"Pulling workflow from {pk}...")
159
169
 
160
- client = create_vellum_client()
170
+ resolved_workspace = workspace or workflow_config.workspace or DEFAULT_WORKSPACE_CONFIG.name
171
+ workspace_config = (
172
+ next((w for w in config.workspaces if w.name == resolved_workspace), DEFAULT_WORKSPACE_CONFIG)
173
+ if workspace
174
+ else DEFAULT_WORKSPACE_CONFIG
175
+ )
176
+ api_key = os.getenv(workspace_config.api_key)
177
+ if not api_key:
178
+ raise ValueError(f"No API key value found in environment for workspace '{workspace_config.name}'.")
179
+
180
+ client = create_vellum_client(
181
+ api_key=api_key,
182
+ api_url=workspace_config.api_url,
183
+ )
161
184
  query_parameters = {}
162
185
 
163
186
  if include_json:
vellum_cli/push.py CHANGED
@@ -45,8 +45,11 @@ def push_command(
45
45
  else config.workflows
46
46
  )
47
47
 
48
- if len(workflow_configs) > 1 and workspace:
49
- workflow_configs = [w for w in workflow_configs if w.workspace == workspace]
48
+ if len(workflow_configs) > 1:
49
+ if workspace:
50
+ workflow_configs = [w for w in workflow_configs if w.workspace == workspace]
51
+ else:
52
+ workflow_configs = [w for w in workflow_configs if w.workspace == DEFAULT_WORKSPACE_CONFIG.name]
50
53
 
51
54
  if len(workflow_configs) == 0:
52
55
  if module and module_exists(module):
@@ -3,6 +3,7 @@ import io
3
3
  import json
4
4
  import os
5
5
  import tempfile
6
+ from unittest import mock
6
7
  from uuid import uuid4
7
8
  import zipfile
8
9
 
@@ -1284,3 +1285,74 @@ def test_pull__workflow_deployment_with_name_and_id(vellum_client):
1284
1285
  assert lock_data["workflows"][0]["deployments"][0]["label"] == deployment_label
1285
1286
 
1286
1287
  os.chdir(current_dir)
1288
+
1289
+
1290
+ def test_pull__workspace_option__uses_different_api_key(mock_module, vellum_client_class):
1291
+ # GIVEN a module and workflow_sandbox_id
1292
+ temp_dir = mock_module.temp_dir
1293
+ module = mock_module.module
1294
+ set_pyproject_toml = mock_module.set_pyproject_toml
1295
+ workflow_sandbox_id = str(uuid4())
1296
+
1297
+ # AND a different workspace is set in the pyproject.toml
1298
+ set_pyproject_toml(
1299
+ {
1300
+ "workflows": [],
1301
+ "workspaces": [
1302
+ {
1303
+ "name": "my_other_workspace",
1304
+ "api_key": "MY_OTHER_VELLUM_API_KEY",
1305
+ }
1306
+ ],
1307
+ }
1308
+ )
1309
+
1310
+ # AND the .env file has the other api key stored
1311
+ with open(os.path.join(temp_dir, ".env"), "w") as f:
1312
+ f.write(
1313
+ """
1314
+ VELLUM_API_KEY=abcdef123456
1315
+ MY_OTHER_VELLUM_API_KEY=aaabbbcccddd
1316
+ """
1317
+ )
1318
+
1319
+ # AND the workflow pull API call returns a zip file
1320
+ vellum_client_class.return_value.workflows.pull.return_value = iter(
1321
+ [_zip_file_map({"workflow.py": "print('hello')"})]
1322
+ )
1323
+
1324
+ # WHEN calling `vellum pull` with --workspace
1325
+ runner = CliRunner()
1326
+ result = runner.invoke(
1327
+ cli_main,
1328
+ [
1329
+ "workflows",
1330
+ "pull",
1331
+ module,
1332
+ "--workflow-sandbox-id",
1333
+ workflow_sandbox_id,
1334
+ "--workspace",
1335
+ "my_other_workspace",
1336
+ ],
1337
+ )
1338
+
1339
+ # THEN it should succeed
1340
+ assert result.exit_code == 0, result.output
1341
+
1342
+ # AND we should have called the vellum client with the correct api key
1343
+ vellum_client_class.assert_called_once_with(
1344
+ api_key="aaabbbcccddd",
1345
+ environment=mock.ANY,
1346
+ )
1347
+
1348
+ # AND the vellum lock file should have been updated with the correct workspace
1349
+ with open(os.path.join(temp_dir, "vellum.lock.json")) as f:
1350
+ lock_file_content = json.load(f)
1351
+ assert lock_file_content["workflows"][0]["workspace"] == "my_other_workspace"
1352
+ assert lock_file_content["workflows"][0]["workflow_sandbox_id"] == workflow_sandbox_id
1353
+
1354
+ # AND the workflow.py file is written as expected
1355
+ workflow_py = os.path.join(temp_dir, *module.split("."), "workflow.py")
1356
+ assert os.path.exists(workflow_py)
1357
+ with open(workflow_py) as f:
1358
+ assert f.read() == "print('hello')"
@@ -870,3 +870,45 @@ def test_push__create_new_config_for_existing_module(mock_module, vellum_client)
870
870
  new_config = new_configs[0]
871
871
  assert new_config["workflow_sandbox_id"] == new_workflow_sandbox_id
872
872
  assert new_config["workspace"] == "default"
873
+
874
+
875
+ def test_push__use_default_workspace_if_not_specified__multiple_workflows_configured(mock_module, vellum_client):
876
+ # GIVEN a config with a workspace configured
877
+ temp_dir = mock_module.temp_dir
878
+ module = mock_module.module
879
+ workflow_sandbox_id = str(uuid4())
880
+ mock_module.set_pyproject_toml(
881
+ {
882
+ "workspaces": [
883
+ {"name": "my_other_workspace"},
884
+ ],
885
+ "workflows": [
886
+ {"module": module, "workflow_sandbox_id": workflow_sandbox_id, "workspace": "default"},
887
+ {"module": module, "workflow_sandbox_id": str(uuid4()), "workspace": "my_other_workspace"},
888
+ ],
889
+ }
890
+ )
891
+
892
+ # AND a workflow exists in the module successfully
893
+ _ensure_workflow_py(temp_dir, module)
894
+
895
+ # AND the push API call returns successfully
896
+ vellum_client.workflows.push.return_value = WorkflowPushResponse(
897
+ workflow_sandbox_id=workflow_sandbox_id,
898
+ )
899
+
900
+ # WHEN calling `vellum push` with a module without a workspace specified
901
+ runner = CliRunner()
902
+ result = runner.invoke(cli_main, ["workflows", "push", module])
903
+
904
+ # THEN it should succeed
905
+ assert result.exit_code == 0, result.output
906
+
907
+ # AND the lock file should reflect that this workflow is using the default workspace
908
+ with open(os.path.join(temp_dir, "vellum.lock.json")) as f:
909
+ lock_file_content = json.load(f)
910
+ configs = [w for w in lock_file_content["workflows"] if w["module"] == module]
911
+ assert len(configs) == 2
912
+ config = configs[0]
913
+ assert config["workflow_sandbox_id"] == workflow_sandbox_id
914
+ assert config["workspace"] == "default"
@@ -198,7 +198,7 @@ def test_serialize_node__prompt_inputs__state_reference():
198
198
  {
199
199
  "key": "foo",
200
200
  "value": {
201
- "type": "STATE_VALUE",
201
+ "type": "WORKFLOW_STATE",
202
202
  "state_variable_id": "45649791-c642-4405-aff9-a1fafd780ea1",
203
203
  },
204
204
  },
@@ -207,7 +207,7 @@ def serialize_value(display_context: "WorkflowDisplayContext", value: Any) -> Js
207
207
  if isinstance(value, StateValueReference):
208
208
  state_value_display = display_context.global_state_value_displays[value]
209
209
  return {
210
- "type": "STATE_VALUE",
210
+ "type": "WORKFLOW_STATE",
211
211
  "state_variable_id": str(state_value_display.id),
212
212
  }
213
213