vellum-ai 0.12.7__py3-none-any.whl → 0.12.8__py3-none-any.whl

Files changed (27)
  1. vellum/client/core/client_wrapper.py +1 -1
  2. vellum/evaluations/resources.py +1 -1
  3. vellum/prompts/__init__.py +0 -0
  4. vellum/prompts/blocks/__init__.py +0 -0
  5. vellum/prompts/blocks/compilation.py +190 -0
  6. vellum/prompts/blocks/exceptions.py +2 -0
  7. vellum/prompts/blocks/tests/__init__.py +0 -0
  8. vellum/prompts/blocks/tests/test_compilation.py +110 -0
  9. vellum/prompts/blocks/types.py +36 -0
  10. vellum/utils/__init__.py +0 -0
  11. vellum/utils/templating/__init__.py +0 -0
  12. vellum/utils/templating/constants.py +28 -0
  13. vellum/{workflows/nodes/core/templating_node → utils/templating}/render.py +1 -1
  14. vellum/workflows/nodes/core/templating_node/node.py +5 -31
  15. vellum/workflows/nodes/experimental/README.md +6 -0
  16. vellum/workflows/nodes/experimental/__init__.py +0 -0
  17. vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py +5 -0
  18. vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py +260 -0
  19. vellum/workflows/sandbox.py +3 -0
  20. {vellum_ai-0.12.7.dist-info → vellum_ai-0.12.8.dist-info}/METADATA +2 -1
  21. {vellum_ai-0.12.7.dist-info → vellum_ai-0.12.8.dist-info}/RECORD +27 -13
  22. /vellum/{workflows/nodes/core/templating_node → utils/templating}/custom_filters.py +0 -0
  23. /vellum/{workflows/nodes/core/templating_node → utils/templating}/exceptions.py +0 -0
  24. /vellum/{evaluations/utils → utils}/typing.py +0 -0
  25. {vellum_ai-0.12.7.dist-info → vellum_ai-0.12.8.dist-info}/LICENSE +0 -0
  26. {vellum_ai-0.12.7.dist-info → vellum_ai-0.12.8.dist-info}/WHEEL +0 -0
  27. {vellum_ai-0.12.7.dist-info → vellum_ai-0.12.8.dist-info}/entry_points.txt +0 -0
@@ -18,7 +18,7 @@ class BaseClientWrapper:
          headers: typing.Dict[str, str] = {
              "X-Fern-Language": "Python",
              "X-Fern-SDK-Name": "vellum-ai",
-             "X-Fern-SDK-Version": "0.12.7",
+             "X-Fern-SDK-Version": "0.12.8",
          }
          headers["X_API_KEY"] = self.api_key
          return headers
@@ -12,7 +12,6 @@ from vellum.evaluations.constants import DEFAULT_MAX_POLLING_DURATION_MS, DEFAUL
  from vellum.evaluations.exceptions import TestSuiteRunResultsException
  from vellum.evaluations.utils.env import get_api_key
  from vellum.evaluations.utils.paginator import PaginatedResults, get_all_results
- from vellum.evaluations.utils.typing import cast_not_optional
  from vellum.evaluations.utils.uuid import is_valid_uuid
  from vellum.types import (
      ExternalTestCaseExecutionRequest,
@@ -24,6 +23,7 @@ from vellum.types import (
      TestSuiteRunMetricOutput,
      TestSuiteRunState,
  )
+ from vellum.utils.typing import cast_not_optional

  logger = logging.getLogger(__name__)

File without changes
File without changes
@@ -0,0 +1,190 @@
+ import json
+ from typing import Optional, cast
+
+ from vellum import (
+     ChatMessage,
+     JsonVellumValue,
+     PromptBlock,
+     PromptRequestInput,
+     RichTextPromptBlock,
+     StringVellumValue,
+     VellumVariable,
+ )
+ from vellum.prompts.blocks.exceptions import PromptCompilationError
+ from vellum.prompts.blocks.types import CompiledChatMessagePromptBlock, CompiledPromptBlock, CompiledValuePromptBlock
+ from vellum.utils.templating.constants import DEFAULT_JINJA_CUSTOM_FILTERS
+ from vellum.utils.templating.render import render_sandboxed_jinja_template
+ from vellum.utils.typing import cast_not_optional
+
+
+ def compile_prompt_blocks(
+     blocks: list[PromptBlock],
+     inputs: list[PromptRequestInput],
+     input_variables: list[VellumVariable],
+ ) -> list[CompiledPromptBlock]:
+     """Compiles a list of Prompt Blocks, performing all variable substitutions and Jinja templating needed."""
+
+     sanitized_inputs = _sanitize_inputs(inputs)
+
+     compiled_blocks: list[CompiledPromptBlock] = []
+     for block in blocks:
+         if block.state == "DISABLED":
+             continue
+
+         if block.block_type == "CHAT_MESSAGE":
+             chat_role = cast_not_optional(block.chat_role)
+             inner_blocks = cast_not_optional(block.blocks)
+             unterminated = block.chat_message_unterminated or False
+
+             inner_prompt_blocks = compile_prompt_blocks(
+                 inner_blocks,
+                 sanitized_inputs,
+                 input_variables,
+             )
+             if not inner_prompt_blocks:
+                 continue
+
+             compiled_blocks.append(
+                 CompiledChatMessagePromptBlock(
+                     role=chat_role,
+                     unterminated=unterminated,
+                     source=block.chat_source,
+                     blocks=[inner for inner in inner_prompt_blocks if inner.block_type == "VALUE"],
+                     cache_config=block.cache_config,
+                 )
+             )
+
+         elif block.block_type == "JINJA":
+             if block.template is None:
+                 continue
+
+             rendered_template = render_sandboxed_jinja_template(
+                 template=block.template,
+                 input_values={input_.key: input_.value for input_ in sanitized_inputs},
+                 jinja_custom_filters=DEFAULT_JINJA_CUSTOM_FILTERS,
+                 jinja_globals=DEFAULT_JINJA_CUSTOM_FILTERS,
+             )
+             jinja_content = StringVellumValue(value=rendered_template)
+
+             compiled_blocks.append(
+                 CompiledValuePromptBlock(
+                     content=jinja_content,
+                     cache_config=block.cache_config,
+                 )
+             )
+
+         elif block.block_type == "VARIABLE":
+             compiled_input: Optional[PromptRequestInput] = next(
+                 (input_ for input_ in sanitized_inputs if input_.key == str(block.input_variable)), None
+             )
+             if compiled_input is None:
+                 raise PromptCompilationError(f"Input variable '{block.input_variable}' not found")
+
+             if compiled_input.type == "CHAT_HISTORY":
+                 history = cast(list[ChatMessage], compiled_input.value)
+                 chat_message_blocks = _compile_chat_messages_as_prompt_blocks(history)
+                 compiled_blocks.extend(chat_message_blocks)
+                 continue
+
+             if compiled_input.type == "STRING":
+                 compiled_blocks.append(
+                     CompiledValuePromptBlock(
+                         content=StringVellumValue(value=compiled_input.value),
+                         cache_config=block.cache_config,
+                     )
+                 )
+             elif compiled_input == "JSON":
+                 compiled_blocks.append(
+                     CompiledValuePromptBlock(
+                         content=JsonVellumValue(value=compiled_input.value),
+                         cache_config=block.cache_config,
+                     )
+                 )
+             elif compiled_input.type == "CHAT_HISTORY":
+                 chat_message_blocks = _compile_chat_messages_as_prompt_blocks(compiled_input.value)
+                 compiled_blocks.extend(chat_message_blocks)
+             else:
+                 raise ValueError(f"Invalid input type for variable block: {compiled_input.type}")
+
+         elif block.block_type == "RICH_TEXT":
+             value_block = _compile_rich_text_block_as_value_block(block=block, inputs=sanitized_inputs)
+             compiled_blocks.append(value_block)
+
+         elif block.block_type == "FUNCTION_DEFINITION":
+             raise ValueError("Function definitions shouldn't go through compilation process")
+         else:
+             raise ValueError(f"Unknown block_type: {block.block_type}")
+
+     return compiled_blocks
+
+
+ def _compile_chat_messages_as_prompt_blocks(chat_messages: list[ChatMessage]) -> list[CompiledChatMessagePromptBlock]:
+     blocks: list[CompiledChatMessagePromptBlock] = []
+     for chat_message in chat_messages:
+         if chat_message.content is None:
+             continue
+
+         chat_message_blocks = (
+             [
+                 CompiledValuePromptBlock(
+                     content=item,
+                 )
+                 for item in chat_message.content.value
+             ]
+             if chat_message.content.type == "ARRAY"
+             else [
+                 CompiledValuePromptBlock(
+                     content=chat_message.content,
+                 )
+             ]
+         )
+
+         blocks.append(
+             CompiledChatMessagePromptBlock(
+                 role=chat_message.role,
+                 unterminated=False,
+                 blocks=chat_message_blocks,
+                 source=chat_message.source,
+             )
+         )
+
+     return blocks
+
+
+ def _compile_rich_text_block_as_value_block(
+     block: RichTextPromptBlock,
+     inputs: list[PromptRequestInput],
+ ) -> CompiledValuePromptBlock:
+     value: str = ""
+     for child_block in block.blocks:
+         if child_block.block_type == "PLAIN_TEXT":
+             value += child_block.text
+         elif child_block.block_type == "VARIABLE":
+             variable = next((input_ for input_ in inputs if input_.key == str(child_block.input_variable)), None)
+             if variable is None:
+                 raise PromptCompilationError(f"Input variable '{child_block.input_variable}' not found")
+             elif variable.type == "STRING":
+                 value += str(variable.value)
+             elif variable.type == "JSON":
+                 value += json.dumps(variable.value, indent=4)
+             else:
+                 raise PromptCompilationError(
+                     f"Input variable '{child_block.input_variable}' must be of type STRING or JSON"
+                 )
+         else:
+             raise ValueError(f"Invalid child block_type for RICH_TEXT: {child_block.block_type}")
+
+     return CompiledValuePromptBlock(content=StringVellumValue(value=value), cache_config=block.cache_config)
+
+
+ def _sanitize_inputs(inputs: list[PromptRequestInput]) -> list[PromptRequestInput]:
+     sanitized_inputs: list[PromptRequestInput] = []
+     for input_ in inputs:
+         if input_.type == "CHAT_HISTORY" and input_.value is None:
+             sanitized_inputs.append(input_.model_copy(update={"value": cast(list[ChatMessage], [])}))
+         elif input_.type == "STRING" and input_.value is None:
+             sanitized_inputs.append(input_.model_copy(update={"value": ""}))
+         else:
+             sanitized_inputs.append(input_)
+
+     return sanitized_inputs
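A minimal usage sketch of the new compile_prompt_blocks helper, mirroring the call shape exercised in the test module further below; the block, input, and variable values here are illustrative:

    from vellum import JinjaPromptBlock, PromptRequestStringInput, VellumVariable
    from vellum.prompts.blocks.compilation import compile_prompt_blocks

    # Compile one Jinja block, substituting the "echo" input variable.
    compiled = compile_prompt_blocks(
        blocks=[JinjaPromptBlock(template="Repeat back to me {{ echo }}")],
        inputs=[PromptRequestStringInput(key="echo", value="Hello, world!")],
        input_variables=[VellumVariable(id="1", type="STRING", key="echo")],
    )
    # Each result is a CompiledValuePromptBlock or CompiledChatMessagePromptBlock.
    print(compiled[0].content.value)  # Repeat back to me Hello, world!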
@@ -0,0 +1,2 @@
+ class PromptCompilationError(Exception):
+     pass
File without changes
@@ -0,0 +1,110 @@
+ import pytest
+
+ from vellum import (
+     ChatMessagePromptBlock,
+     JinjaPromptBlock,
+     PlainTextPromptBlock,
+     PromptRequestStringInput,
+     RichTextPromptBlock,
+     StringVellumValue,
+     VariablePromptBlock,
+     VellumVariable,
+ )
+ from vellum.prompts.blocks.compilation import compile_prompt_blocks
+ from vellum.prompts.blocks.types import CompiledChatMessagePromptBlock, CompiledValuePromptBlock
+
+
+ @pytest.mark.parametrize(
+     ["blocks", "inputs", "input_variables", "expected"],
+     [
+         # Empty
+         ([], [], [], []),
+         # Jinja
+         (
+             [JinjaPromptBlock(template="Hello, world!")],
+             [],
+             [],
+             [
+                 CompiledValuePromptBlock(content=StringVellumValue(value="Hello, world!")),
+             ],
+         ),
+         (
+             [JinjaPromptBlock(template="Repeat back to me {{ echo }}")],
+             [PromptRequestStringInput(key="echo", value="Hello, world!")],
+             [VellumVariable(id="1", type="STRING", key="echo")],
+             [
+                 CompiledValuePromptBlock(content=StringVellumValue(value="Repeat back to me Hello, world!")),
+             ],
+         ),
+         # Rich Text
+         (
+             [
+                 RichTextPromptBlock(
+                     blocks=[
+                         PlainTextPromptBlock(text="Hello, world!"),
+                     ]
+                 )
+             ],
+             [],
+             [],
+             [
+                 CompiledValuePromptBlock(content=StringVellumValue(value="Hello, world!")),
+             ],
+         ),
+         (
+             [
+                 RichTextPromptBlock(
+                     blocks=[
+                         PlainTextPromptBlock(text='Repeat back to me "'),
+                         VariablePromptBlock(input_variable="echo"),
+                         PlainTextPromptBlock(text='".'),
+                     ]
+                 )
+             ],
+             [PromptRequestStringInput(key="echo", value="Hello, world!")],
+             [VellumVariable(id="901ec2d6-430c-4341-b963-ca689006f5cc", type="STRING", key="echo")],
+             [
+                 CompiledValuePromptBlock(content=StringVellumValue(value='Repeat back to me "Hello, world!".')),
+             ],
+         ),
+         # Chat Message
+         (
+             [
+                 ChatMessagePromptBlock(
+                     chat_role="USER",
+                     blocks=[
+                         RichTextPromptBlock(
+                             blocks=[
+                                 PlainTextPromptBlock(text='Repeat back to me "'),
+                                 VariablePromptBlock(input_variable="echo"),
+                                 PlainTextPromptBlock(text='".'),
+                             ]
+                         )
+                     ],
+                 )
+             ],
+             [PromptRequestStringInput(key="echo", value="Hello, world!")],
+             [VellumVariable(id="901ec2d6-430c-4341-b963-ca689006f5cc", type="STRING", key="echo")],
+             [
+                 CompiledChatMessagePromptBlock(
+                     role="USER",
+                     blocks=[
+                         CompiledValuePromptBlock(content=StringVellumValue(value='Repeat back to me "Hello, world!".'))
+                     ],
+                 ),
+             ],
+         ),
+     ],
+     ids=[
+         "empty",
+         "jinja-no-variables",
+         "jinja-with-variables",
+         "rich-text-no-variables",
+         "rich-text-with-variables",
+         "chat-message",
+     ],
+ )
+ def test_compile_prompt_blocks__happy(blocks, inputs, input_variables, expected):
+     actual = compile_prompt_blocks(blocks=blocks, inputs=inputs, input_variables=input_variables)
+
+     assert actual == expected
@@ -0,0 +1,36 @@
+ from __future__ import annotations
+
+ from typing import Annotated, Literal, Optional, Union
+
+ from vellum import ArrayVellumValue, ChatMessageRole, EphemeralPromptCacheConfig, VellumValue
+ from vellum.client.core import UniversalBaseModel
+
+
+ class BaseCompiledPromptBlock(UniversalBaseModel):
+     cache_config: Optional[EphemeralPromptCacheConfig] = None
+
+
+ class CompiledValuePromptBlock(BaseCompiledPromptBlock):
+     block_type: Literal["VALUE"] = "VALUE"
+     content: VellumValue
+
+
+ class CompiledChatMessagePromptBlock(BaseCompiledPromptBlock):
+     block_type: Literal["CHAT_MESSAGE"] = "CHAT_MESSAGE"
+     role: ChatMessageRole = "ASSISTANT"
+     unterminated: bool = False
+     blocks: list[CompiledValuePromptBlock] = []
+     source: Optional[str] = None
+
+
+ CompiledPromptBlock = Annotated[
+     Union[
+         CompiledValuePromptBlock,
+         CompiledChatMessagePromptBlock,
+     ],
+     "block_type",
+ ]
+
+ ArrayVellumValue.model_rebuild()
+
+ CompiledValuePromptBlock.model_rebuild()
File without changes
File without changes
@@ -0,0 +1,28 @@
+ import datetime
+ import itertools
+ import json
+ import random
+ import re
+ from typing import Any, Callable, Dict, Union
+
+ import dateutil
+ import pydash
+ import pytz
+ import yaml
+
+ from vellum.utils.templating.custom_filters import is_valid_json_string
+
+ DEFAULT_JINJA_GLOBALS: Dict[str, Any] = {
+     "datetime": datetime,
+     "dateutil": dateutil,
+     "itertools": itertools,
+     "json": json,
+     "pydash": pydash,
+     "pytz": pytz,
+     "random": random,
+     "re": re,
+     "yaml": yaml,
+ }
+ DEFAULT_JINJA_CUSTOM_FILTERS: Dict[str, Callable[[Union[str, bytes]], bool]] = {
+     "is_valid_json_string": is_valid_json_string,
+ }
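A short sketch of how these shared defaults plug into render_sandboxed_jinja_template; the keyword arguments mirror the call made in compilation.py above, while the template and input values are illustrative:

    from vellum.utils.templating.constants import DEFAULT_JINJA_CUSTOM_FILTERS, DEFAULT_JINJA_GLOBALS
    from vellum.utils.templating.render import render_sandboxed_jinja_template

    # Render a template inside the sandboxed Jinja environment with the shared defaults.
    rendered = render_sandboxed_jinja_template(
        template="{{ greeting }}, {{ name }}!",
        input_values={"greeting": "Hello", "name": "world"},
        jinja_custom_filters=DEFAULT_JINJA_CUSTOM_FILTERS,
        jinja_globals=DEFAULT_JINJA_GLOBALS,
    )
    print(rendered)  # Hello, world!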
@@ -3,7 +3,7 @@ from typing import Any, Callable, Dict, Optional, Union

  from jinja2.sandbox import SandboxedEnvironment

- from vellum.workflows.nodes.core.templating_node.exceptions import JinjaTemplateError
+ from vellum.utils.templating.exceptions import JinjaTemplateError
  from vellum.workflows.state.encoder import DefaultStateEncoder

@@ -1,42 +1,16 @@
- import datetime
- import itertools
- import json
- import random
- import re
  from typing import Any, Callable, ClassVar, Dict, Generic, Mapping, Tuple, Type, TypeVar, Union, get_args

- import dateutil.parser
- import pydash
- import pytz
- import yaml
-
+ from vellum.utils.templating.constants import DEFAULT_JINJA_CUSTOM_FILTERS, DEFAULT_JINJA_GLOBALS
+ from vellum.utils.templating.exceptions import JinjaTemplateError
+ from vellum.utils.templating.render import render_sandboxed_jinja_template
  from vellum.workflows.errors import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
  from vellum.workflows.nodes.bases import BaseNode
  from vellum.workflows.nodes.bases.base import BaseNodeMeta
- from vellum.workflows.nodes.core.templating_node.custom_filters import is_valid_json_string
- from vellum.workflows.nodes.core.templating_node.exceptions import JinjaTemplateError
- from vellum.workflows.nodes.core.templating_node.render import render_sandboxed_jinja_template
  from vellum.workflows.types.core import EntityInputsInterface
  from vellum.workflows.types.generics import StateType
  from vellum.workflows.types.utils import get_original_base

- _DEFAULT_JINJA_GLOBALS: Dict[str, Any] = {
-     "datetime": datetime,
-     "dateutil": dateutil,
-     "itertools": itertools,
-     "json": json,
-     "pydash": pydash,
-     "pytz": pytz,
-     "random": random,
-     "re": re,
-     "yaml": yaml,
- }
-
- _DEFAULT_JINJA_CUSTOM_FILTERS: Dict[str, Callable[[Union[str, bytes]], bool]] = {
-     "is_valid_json_string": is_valid_json_string,
- }
-
  _OutputType = TypeVar("_OutputType")


@@ -78,8 +52,8 @@ class TemplatingNode(BaseNode[StateType], Generic[StateType, _OutputType], metac
      # The inputs to render the template with.
      inputs: ClassVar[EntityInputsInterface]

-     jinja_globals: Dict[str, Any] = _DEFAULT_JINJA_GLOBALS
-     jinja_custom_filters: Mapping[str, Callable[[Union[str, bytes]], bool]] = _DEFAULT_JINJA_CUSTOM_FILTERS
+     jinja_globals: Dict[str, Any] = DEFAULT_JINJA_GLOBALS
+     jinja_custom_filters: Mapping[str, Callable[[Union[str, bytes]], bool]] = DEFAULT_JINJA_CUSTOM_FILTERS

      class Outputs(BaseNode.Outputs):
          """
@@ -0,0 +1,6 @@
+ # 🧪 Experimental
+
+ This section is a proofing ground for new ideas and concepts. It's not meant to be used in production and is
+ subject to breaking changes at any time.
+
+ If a concept within is sufficiently interesting and validated, then it may be introduced as a first-class node.
File without changes
@@ -0,0 +1,5 @@
+ from .node import OpenAIChatCompletionNode
+
+ __all__ = [
+     "OpenAIChatCompletionNode",
+ ]
@@ -0,0 +1,260 @@
+ import json
+ import logging
+ import os
+ from uuid import uuid4
+ from typing import Any, Iterable, Iterator, List, Literal, Union, cast
+
+ from openai import OpenAI
+ from openai.types.chat import (
+     ChatCompletionAssistantMessageParam,
+     ChatCompletionContentPartImageParam,
+     ChatCompletionContentPartInputAudioParam,
+     ChatCompletionContentPartParam,
+     ChatCompletionContentPartRefusalParam,
+     ChatCompletionContentPartTextParam,
+     ChatCompletionMessageParam,
+     ChatCompletionSystemMessageParam,
+     ChatCompletionUserMessageParam,
+ )
+ from openai.types.chat.chat_completion_chunk import Choice
+
+ from vellum import (
+     AdHocExecutePromptEvent,
+     FulfilledAdHocExecutePromptEvent,
+     InitiatedAdHocExecutePromptEvent,
+     RejectedAdHocExecutePromptEvent,
+     StreamingAdHocExecutePromptEvent,
+     StringVellumValue,
+     VellumAudio,
+     VellumError,
+ )
+ from vellum.prompts.blocks.compilation import compile_prompt_blocks
+ from vellum.prompts.blocks.types import CompiledChatMessagePromptBlock
+ from vellum.workflows.errors import WorkflowErrorCode
+ from vellum.workflows.exceptions import NodeException
+ from vellum.workflows.nodes import InlinePromptNode
+ from vellum.workflows.types.generics import StateType
+
+ logger = logging.getLogger(__name__)
+
+
+ class OpenAIChatCompletionNode(InlinePromptNode[StateType]):
+     """
+     Used to execute a Prompt using the OpenAI API.
+     """
+
+     # Override
+     def _get_prompt_event_stream(self) -> Iterator[AdHocExecutePromptEvent]:
+         client = self._get_client()
+
+         execution_id = str(uuid4())
+
+         yield InitiatedAdHocExecutePromptEvent(
+             execution_id=execution_id,
+         )
+
+         try:
+             stream = client.chat.completions.create(
+                 messages=self._get_messages(),
+                 model=self.ml_model,
+                 # TODO: Add support for additional parameters
+                 stream=True,
+             )
+         except Exception as exc:
+             yield RejectedAdHocExecutePromptEvent(
+                 error=VellumError(
+                     code=WorkflowErrorCode.PROVIDER_ERROR,
+                     message=exc.args[0],
+                 ),
+                 execution_id=execution_id,
+             )
+             return
+
+         combined_delta_content = ""
+         for chunk in stream:
+             choices: List[Choice] = chunk.choices
+             if len(choices) != 1:
+                 yield RejectedAdHocExecutePromptEvent(
+                     error=VellumError(
+                         code=WorkflowErrorCode.PROVIDER_ERROR,
+                         message="Expected one choice per chunk, but found more than one.",
+                     ),
+                     execution_id=execution_id,
+                 )
+                 return
+
+             choice = choices[0]
+             delta = choice.delta
+
+             if delta.tool_calls:
+                 # TODO: Add support for tool calls
+                 raise NotImplementedError("This node hasn't been extended to support tool calling yet.")
+
+             if delta.content:
+                 combined_delta_content += delta.content
+
+                 StreamingAdHocExecutePromptEvent(
+                     output=StringVellumValue(value=delta.content),
+                     # TODO: Add support for multiple outputs
+                     output_index=1,
+                     execution_id=execution_id,
+                 )
+
+         yield FulfilledAdHocExecutePromptEvent(
+             # TODO: Add support for multiple outputs
+             outputs=[
+                 StringVellumValue(value=combined_delta_content),
+             ],
+             execution_id=execution_id,
+         )
+
+     def _get_client(self) -> OpenAI:
+         """Used to retrieve an API client for interacting with the OpenAI API.
+
+         Note: This method can be overridden if you'd like to use your own API client that conforms to the same
+         interfaces as that of OpenAI.
+         """
+
+         openai_api_key = os.environ.get("OPENAI_API_KEY")
+
+         if not openai_api_key:
+             raise NodeException(
+                 code=WorkflowErrorCode.INTERNAL_ERROR,
+                 message="Unable to determine an OpenAI API key.",
+             )
+
+         client = OpenAI(api_key=openai_api_key)
+         return client
+
+     def _get_messages(self) -> Iterable[ChatCompletionMessageParam]:
+         input_variables, input_values = self._compile_prompt_inputs()
+
+         compiled_blocks = compile_prompt_blocks(
+             blocks=self.blocks, inputs=input_values, input_variables=input_variables
+         )
+
+         chat_message_blocks: list[CompiledChatMessagePromptBlock] = [
+             block for block in compiled_blocks if block.block_type == "CHAT_MESSAGE"
+         ]
+         messages = [self._create_message(block) for block in chat_message_blocks]
+
+         return messages
+
+     @classmethod
+     def _create_message(cls, chat_message_block: CompiledChatMessagePromptBlock) -> ChatCompletionMessageParam:
+         name = chat_message_block.source
+         content = cls._create_message_content(chat_message_block)
+
+         if chat_message_block.role == "SYSTEM":
+             relevant_system_content = [
+                 cast(ChatCompletionContentPartTextParam, c) for c in content if c["type"] == "text"
+             ]
+             system_message: ChatCompletionSystemMessageParam = {
+                 "content": relevant_system_content,
+                 "role": "system",
+             }
+             if name:
+                 system_message["name"] = name
+
+             return system_message
+         elif chat_message_block.role == "USER":
+             user_message: ChatCompletionUserMessageParam = {
+                 "content": content,
+                 "role": "user",
+             }
+             if name:
+                 user_message["name"] = name
+
+             return user_message
+         elif chat_message_block.role == "ASSISTANT":
+             relevant_assistant_content = [
+                 cast(Union[ChatCompletionContentPartTextParam, ChatCompletionContentPartRefusalParam], c)
+                 for c in content
+                 if c["type"] in ["text", "refusal"]
+             ]
+             assistant_message: ChatCompletionAssistantMessageParam = {
+                 "content": relevant_assistant_content,
+                 "role": "assistant",
+             }
+             if name:
+                 assistant_message["name"] = name
+
+             return assistant_message
+         else:
+             logger.error(f"Unexpected role: {chat_message_block.role}")
+             raise NodeException(
+                 code=WorkflowErrorCode.INTERNAL_ERROR, message="Unexpected role found when compiling prompt blocks"
+             )
+
+     @classmethod
+     def _create_message_content(
+         cls,
+         chat_message_block: CompiledChatMessagePromptBlock,
+     ) -> List[ChatCompletionContentPartParam]:
+         content: List[ChatCompletionContentPartParam] = []
+         for block in chat_message_block.blocks:
+             if block.content.type == "STRING":
+                 string_value = cast(str, block.content.value)
+                 string_content_item: ChatCompletionContentPartTextParam = {"type": "text", "text": string_value}
+                 content.append(string_content_item)
+             elif block.content.type == "JSON":
+                 json_value = cast(Any, block.content.value)
+                 json_content_item: ChatCompletionContentPartTextParam = {"type": "text", "text": json.dumps(json_value)}
+                 content.append(json_content_item)
+             elif block.content.type == "IMAGE":
+                 image_value = cast(VellumAudio, block.content.value)
+                 image_content_item: ChatCompletionContentPartImageParam = {
+                     "type": "image_url",
+                     "image_url": {"url": image_value.src},
+                 }
+                 if image_value.metadata and image_value.metadata.get("detail"):
+                     detail = image_value.metadata["detail"]
+
+                     if detail not in ["auto", "low", "high"]:
+                         raise NodeException(
+                             code=WorkflowErrorCode.INTERNAL_ERROR,
+                             message="Image detail must be one of 'auto', 'low', or 'high.",
+                         )
+
+                     image_content_item["image_url"]["detail"] = cast(Literal["auto", "low", "high"], detail)
+
+                 content.append(image_content_item)
+             elif block.content.type == "AUDIO":
+                 audio_value = cast(VellumAudio, block.content.value)
+                 audio_value_src_parts = audio_value.src.split(",")
+                 if len(audio_value_src_parts) != 2:
+                     raise NodeException(
+                         code=WorkflowErrorCode.INTERNAL_ERROR, message="Audio data is not properly encoded."
+                     )
+                 _, cleaned_audio_value = audio_value_src_parts
+                 if not audio_value.metadata:
+                     raise NodeException(
+                         code=WorkflowErrorCode.INTERNAL_ERROR, message="Audio metadata is required for audio input."
+                     )
+                 audio_format = audio_value.metadata.get("format")
+                 if not audio_format:
+                     raise NodeException(
+                         code=WorkflowErrorCode.INTERNAL_ERROR, message="Audio format is required for audio input."
+                     )
+                 if audio_format not in {"wav", "mp3"}:
+                     raise NodeException(
+                         code=WorkflowErrorCode.INTERNAL_ERROR,
+                         message="Audio format must be one of 'wav' or 'mp3'.",
+                     )
+
+                 audio_content_item: ChatCompletionContentPartInputAudioParam = {
+                     "type": "input_audio",
+                     "input_audio": {
+                         "data": cleaned_audio_value,
+                         "format": cast(Literal["wav", "mp3"], audio_format),
+                     },
+                 }
+
+                 content.append(audio_content_item)
+             else:
+                 raise NodeException(
+                     code=WorkflowErrorCode.INTERNAL_ERROR,
+                     message=f"Failed to parse chat message block {block.content.type}",
+                 )
+
+         return content
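A hedged sketch of how this experimental node might be wired up, assuming the blocks and ml_model attributes it inherits from InlinePromptNode and an OPENAI_API_KEY in the environment; the subclass name, model, and prompt content are illustrative:

    from vellum import ChatMessagePromptBlock, JinjaPromptBlock
    from vellum.workflows.nodes.experimental.openai_chat_completion_node import OpenAIChatCompletionNode


    class GreeterNode(OpenAIChatCompletionNode):
        # _get_client() reads OPENAI_API_KEY from the environment at runtime.
        ml_model = "gpt-4o-mini"
        blocks = [
            ChatMessagePromptBlock(
                chat_role="SYSTEM",
                blocks=[JinjaPromptBlock(template="You are a terse assistant.")],
            ),
            ChatMessagePromptBlock(
                chat_role="USER",
                blocks=[JinjaPromptBlock(template="Say hello in one short sentence.")],
            ),
        ]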
@@ -49,3 +49,6 @@ class SandboxRunner(Generic[WorkflowType]):
                  self._logger.info(f"Just started Node: {event.node_definition.__name__}")
              elif event.name == "node.execution.fulfilled":
                  self._logger.info(f"Just finished Node: {event.node_definition.__name__}")
+             elif event.name == "node.execution.rejected":
+                 self._logger.debug(f"Error: {event.error}")
+                 self._logger.error(f"Failed to run Node: {event.node_definition.__name__}")
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.12.7
+ Version: 0.12.8
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0
@@ -25,6 +25,7 @@ Requires-Dist: cdktf (>=0.20.5,<0.21.0)
  Requires-Dist: click (==8.1.7)
  Requires-Dist: docker (==7.1.0)
  Requires-Dist: httpx (>=0.21.2)
+ Requires-Dist: openai (>=1.0.0)
  Requires-Dist: orderly-set (>=5.2.2,<6.0.0)
  Requires-Dist: publication (==0.0.3)
  Requires-Dist: pydantic (>=1.9.2)
@@ -76,7 +76,7 @@ vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
  vellum/client/__init__.py,sha256=o4m7iRZWEV8rP3GkdaztHAjNmjxjWERlarviFoHzuKI,110927
  vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
  vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- vellum/client/core/client_wrapper.py,sha256=RUr-cX1j7KxfabSh_XadU_QRvmolEu273OpLtcRINs8,1868
+ vellum/client/core/client_wrapper.py,sha256=oWk19_A_3mvqQM_W3wJgQnObX6YIE918NEp9olVAuEo,1868
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
  vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -649,17 +649,23 @@ vellum/errors/not_found_error.py,sha256=gC71YBdPyHR46l3RNTs0v9taVvAY0gWRFrcKpKzb
  vellum/evaluations/__init__.py,sha256=hNsLoHSykqXDJP-MwFvu2lExImxo9KEyEJjt_fdAzpE,77
  vellum/evaluations/constants.py,sha256=Vteml4_csZsMgo_q3-71E3JRCAoN6308TXLu5nfLhmU,116
  vellum/evaluations/exceptions.py,sha256=6Xacoyv43fJvVf6Dt6Io5a-f9vF12Tx51jzsQRNSqhY,56
- vellum/evaluations/resources.py,sha256=8-gqAxLnVtDQrHYy73RAGIzomeD6uX_3b_yhmFR9I3g,12688
+ vellum/evaluations/resources.py,sha256=33IDRHOjrRsC20JHs0BtRlaSYCY9Gg-AkA6qkPjBrWo,12676
  vellum/evaluations/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/evaluations/utils/env.py,sha256=Xj_nxsoU5ox06EOTjRopR4lrigQI6Le6qbWGltYoEGU,276
  vellum/evaluations/utils/exceptions.py,sha256=dXMAkzqbHV_AP5FjjbegPlfUE0zQDlpA3qOsoOJUxfg,49
  vellum/evaluations/utils/paginator.py,sha256=rEED_BJAXAM6tM1yMwHePNzszjq_tTq4NbQvi1jWQ_Q,697
- vellum/evaluations/utils/typing.py,sha256=wx_daFqD69cYkuJTVnvNrpjhqC3uuhbnyJ9_bIwC9OU,327
  vellum/evaluations/utils/uuid.py,sha256=Ch6wWRgwICxLxJCTl5iE3EdRlZj2zADR-zUMUtjcMWM,214
  vellum/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/plugins/pydantic.py,sha256=EbI0pJMhUS9rLPSkzmAELfnCHrWCJzOrU06T8ommwdw,2334
  vellum/plugins/utils.py,sha256=U9ZY9KdE3RRvbcG01hXxu9CvfJD6Fo7nJDgcHjQn0FI,606
  vellum/plugins/vellum_mypy.py,sha256=VC15EzjTsXOb9uF1bky4rcxePP-0epMVmCsLB2z4Dh8,24816
+ vellum/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/prompts/blocks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/prompts/blocks/compilation.py,sha256=hBN_ajq77tSkVyUIcDlO-Qu77PSeJ1OzVhp70NmQd2k,7458
+ vellum/prompts/blocks/exceptions.py,sha256=vmk5PV6Vyw9nKjZYQDUDW0LH8MfQNIgFvFb_mFWdIRI,50
+ vellum/prompts/blocks/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/prompts/blocks/tests/test_compilation.py,sha256=0DhMoc4huHR6YnNL-0aBLmWSyUfw2BpRq_gEdKsQmAc,3693
+ vellum/prompts/blocks/types.py,sha256=6aSJQco-5kKeadfKVVXF_SrQPlIJgMYVNc-C7so1sY8,975
  vellum/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/resources/__init__.py,sha256=sQWK7g_Z4EM7pa7fy6vy3d_DMdTJ4wVcozBn3Lx4Qpo,141
  vellum/resources/ad_hoc/__init__.py,sha256=UD01D9nS_M7sRKmMbEg4Tv9SlfFj3cWahVxwUEaSLAY,148
@@ -1197,6 +1203,13 @@ vellum/types/workflow_result_event_output_data_search_results.py,sha256=UNfCHLQ0
  vellum/types/workflow_result_event_output_data_string.py,sha256=rHEVbN0nyf-xoDoSIUEKlUKh6DDoguer4w0iN18JQ2I,178
  vellum/types/workflow_stream_event.py,sha256=PjHGgN0eJm5w-5FJ6__ASC1FU94Gsav_ko5JWkpVvK8,159
  vellum/types/workspace_secret_read.py,sha256=Z6QNXHxVHRdrLXSI31KxngePRwJTVoJYMXVbtPQwrxs,159
+ vellum/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/utils/templating/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/utils/templating/constants.py,sha256=XTNmDsKa7byjw4GMZmzx2dUeYUTeMLZrPgRHcc80Kvc,613
+ vellum/utils/templating/custom_filters.py,sha256=Q0DahYRHP4KfaUXDt9XxN-DFLBrAxlv90yaVqxScoUw,264
+ vellum/utils/templating/exceptions.py,sha256=cDp140PP4OnInW4qAvg3KqiSiF70C71UyEAKRBR1Abo,46
+ vellum/utils/templating/render.py,sha256=0vgkwhu2A6o64aT4fUdTSLFCEMbeRjAKAuvv2k2LYGY,1772
+ vellum/utils/typing.py,sha256=wx_daFqD69cYkuJTVnvNrpjhqC3uuhbnyJ9_bIwC9OU,327
  vellum/version.py,sha256=jq-1PlAYxN9AXuaZqbYk9ak27SgE2lw9Ia5gx1b1gVI,76
  vellum/workflows/README.md,sha256=MLNm-ihc0ao6I8gwwOhXQQBf0jOf-EsA9C519ALYI1o,3610
  vellum/workflows/__init__.py,sha256=CssPsbNvN6rDhoLuqpEv7MMKGa51vE6dvAh6U31Pcio,71
@@ -1278,10 +1291,7 @@ vellum/workflows/nodes/core/retry_node/node.py,sha256=IjNcpzFmHyBUjOHEoULLbKf85B
  vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/core/retry_node/tests/test_node.py,sha256=QXTnHwmJHISxXjvZMeuuEo0iVugVMJyaJoggI8yKXfI,3132
  vellum/workflows/nodes/core/templating_node/__init__.py,sha256=GmyuYo81_A1_Bz6id69ozVFS6FKiuDsZTiA3I6MaL2U,70
- vellum/workflows/nodes/core/templating_node/custom_filters.py,sha256=Q0DahYRHP4KfaUXDt9XxN-DFLBrAxlv90yaVqxScoUw,264
- vellum/workflows/nodes/core/templating_node/exceptions.py,sha256=cDp140PP4OnInW4qAvg3KqiSiF70C71UyEAKRBR1Abo,46
- vellum/workflows/nodes/core/templating_node/node.py,sha256=Q5U5lAW1nhGFJPc94Gxnq1s5RWrbOo7G_AY4n-ABmGg,4504
- vellum/workflows/nodes/core/templating_node/render.py,sha256=OpJp0NAH6qcEL6K9lxR0qjpFb75TYNttJR5iCos8tmg,1792
+ vellum/workflows/nodes/core/templating_node/node.py,sha256=mWNzsa6TLosrD3higKIsiqEv54OIfNY7YzFDkuQgm6k,3964
  vellum/workflows/nodes/core/templating_node/tests/test_templating_node.py,sha256=0BtXeSix7KGIuKzlPFTMLATpNnFPhut1UV_srGptkt0,1120
  vellum/workflows/nodes/core/try_node/__init__.py,sha256=JVD4DrldTIqFQQFrubs9KtWCCc0YCAc7Fzol5ZWIWeM,56
  vellum/workflows/nodes/core/try_node/node.py,sha256=_xkpUNIfHXuwjivMBxdU5DegsNge2ITW5nBifWmBPTY,6670
@@ -1331,6 +1341,10 @@ vellum/workflows/nodes/displayable/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
  vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py,sha256=UI_RMmXn9qwB-StnFPvkDd9FctBQAg43wrfouqvPepk,4701
  vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py,sha256=4CMwDtXwTaEvFfDpA6j2iLqc7S6IICSkvVZOobEpeps,6954
  vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=KqKJtJ0vuNoPuUPMdILmBTt4a2fBBxxun-nmOI7T8jo,2585
+ vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
+ vellum/workflows/nodes/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
+ vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=1EGeiaT-Zoo6pttQFKKBcdf3dmhAbjKGaErYD5FFwlc,10185
  vellum/workflows/nodes/utils.py,sha256=EZt7CzJmgQBR_GWFpZr8d-oaoti3tolTd2Cv9wm7dKo,1087
  vellum/workflows/outputs/__init__.py,sha256=AyZ4pRh_ACQIGvkf0byJO46EDnSix1ZCAXfvh-ms1QE,94
  vellum/workflows/outputs/base.py,sha256=a7W6rNSDSawwGAXYjNTF2iHb9lnZu7WFSOagZIyy__k,7976
@@ -1353,7 +1367,7 @@ vellum/workflows/resolvers/__init__.py,sha256=eH6hTvZO4IciDaf_cf7aM2vs-DkBDyJPyc
  vellum/workflows/resolvers/base.py,sha256=WHra9LRtlTuB1jmuNqkfVE2JUgB61Cyntn8f0b0WZg4,411
  vellum/workflows/runner/__init__.py,sha256=i1iG5sAhtpdsrlvwgH6B-m49JsINkiWyPWs8vyT-bqM,72
  vellum/workflows/runner/runner.py,sha256=RXnLEmSJFbp0u4vKF7rvD2fscuYfcRYkspIJINnvFAI,27607
- vellum/workflows/sandbox.py,sha256=wNyOfd3gb6-O85EQcBIHNCnSYPH7Oufh2z4hQnR2HFU,2059
+ vellum/workflows/sandbox.py,sha256=1aL5BoUgJX6dbIN3pqui20Wk3VyzXV16BUaZz-clmFs,2269
  vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75TtuNEsnE,60
  vellum/workflows/state/base.py,sha256=jpSzF1OQd3-fqi6dMGlNsQl-7JnJxCdzWIigmX8Wz-I,14425
  vellum/workflows/state/context.py,sha256=oXiEdNsWJi1coRB85IreTgUeR6_CrWWBXndtLff9S7M,1272
@@ -1384,8 +1398,8 @@ vellum/workflows/vellum_client.py,sha256=ODrq_TSl-drX2aezXegf7pizpWDVJuTXH-j6528
  vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
  vellum/workflows/workflows/base.py,sha256=zpspOEdO5Ye_0ZvN-Wkzv9iQSiF1sD201ba8lhbnPbs,17086
  vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
- vellum_ai-0.12.7.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
- vellum_ai-0.12.7.dist-info/METADATA,sha256=czpJHlepxy2KrSpZireMIgDjpv_QdMBAK-g33oxoI3U,5128
- vellum_ai-0.12.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- vellum_ai-0.12.7.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
- vellum_ai-0.12.7.dist-info/RECORD,,
+ vellum_ai-0.12.8.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+ vellum_ai-0.12.8.dist-info/METADATA,sha256=FI7BG5Gx3Dd3exq5uqBWnHu8kuqFrVpE_9tcdUuYwdk,5160
+ vellum_ai-0.12.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ vellum_ai-0.12.8.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+ vellum_ai-0.12.8.dist-info/RECORD,,
File without changes