vellum-ai 0.9.8__py3-none-any.whl → 0.9.10__py3-none-any.whl
- vellum/__init__.py +0 -6
- vellum/core/client_wrapper.py +1 -1
- vellum/resources/ad_hoc/client.py +4 -10
- vellum/types/__init__.py +0 -6
- vellum/types/chat_message_prompt_block.py +6 -3
- vellum/types/function_definition_prompt_block.py +5 -2
- vellum/types/jinja_prompt_block.py +3 -2
- {vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/METADATA +1 -1
- {vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/RECORD +11 -14
- vellum/types/chat_message_prompt_block_properties.py +0 -36
- vellum/types/function_definition_prompt_block_properties.py +0 -42
- vellum/types/jinja_prompt_block_properties.py +0 -21
- {vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/LICENSE +0 -0
- {vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/WHEEL +0 -0
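This release removes the ChatMessagePromptBlockProperties, FunctionDefinitionPromptBlockProperties, and JinjaPromptBlockProperties models and inlines their fields directly onto the corresponding prompt block types, as the diffs below show. A minimal sketch of constructing the flattened 0.9.10 blocks; the field names come from the diffs, while the concrete values (template text, role, nesting) are illustrative only:

# Sketch only: field names are taken from the 0.9.10 diffs below; the values are
# placeholders, not examples prescribed by the package.
from vellum import ChatMessagePromptBlock, JinjaPromptBlock

# template / template_type now sit directly on JinjaPromptBlock (no properties wrapper).
jinja_block = JinjaPromptBlock(
    template="Summarize {{ document }}",
    template_type="STRING",
)

# chat_role, chat_source, chat_message_unterminated, and blocks are now top-level fields.
chat_block = ChatMessagePromptBlock(
    chat_role="SYSTEM",
    blocks=[jinja_block],
)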
vellum/__init__.py
CHANGED
@@ -38,7 +38,6 @@ from .types import (
     ChatMessageContent,
     ChatMessageContentRequest,
     ChatMessagePromptBlock,
-    ChatMessagePromptBlockProperties,
     ChatMessageRequest,
     ChatMessageRole,
     CodeExecutionNodeArrayResult,
@@ -135,7 +134,6 @@ from .types import (
     FunctionCallVellumValue,
     FunctionCallVellumValueRequest,
     FunctionDefinitionPromptBlock,
-    FunctionDefinitionPromptBlockProperties,
     GenerateOptionsRequest,
     GenerateRequest,
     GenerateResponse,
@@ -169,7 +167,6 @@ from .types import (
     InstructorVectorizerConfigRequest,
     IterationStateEnum,
     JinjaPromptBlock,
-    JinjaPromptBlockProperties,
     JsonInputRequest,
     JsonVariableValue,
     JsonVellumValue,
@@ -549,7 +546,6 @@ __all__ = [
     "ChatMessageContent",
     "ChatMessageContentRequest",
     "ChatMessagePromptBlock",
-    "ChatMessagePromptBlockProperties",
     "ChatMessageRequest",
     "ChatMessageRole",
     "CodeExecutionNodeArrayResult",
@@ -650,7 +646,6 @@ __all__ = [
     "FunctionCallVellumValue",
     "FunctionCallVellumValueRequest",
     "FunctionDefinitionPromptBlock",
-    "FunctionDefinitionPromptBlockProperties",
     "GenerateOptionsRequest",
     "GenerateRequest",
     "GenerateResponse",
@@ -685,7 +680,6 @@ __all__ = [
     "InternalServerError",
     "IterationStateEnum",
     "JinjaPromptBlock",
-    "JinjaPromptBlockProperties",
     "JsonInputRequest",
     "JsonVariableValue",
     "JsonVellumValue",
vellum/core/client_wrapper.py
CHANGED
@@ -17,7 +17,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.9.8",
+            "X-Fern-SDK-Version": "0.9.10",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
vellum/resources/ad_hoc/client.py
CHANGED
@@ -73,7 +73,6 @@ class AdHocClient:
            AdHocExpandMeta,
            EphemeralPromptCacheConfig,
            JinjaPromptBlock,
-           JinjaPromptBlockProperties,
            PromptParameters,
            PromptRequestStringInput,
            PromptSettings,
@@ -126,10 +125,8 @@ class AdHocClient:
             JinjaPromptBlock(
                 state="ENABLED",
                 cache_config=EphemeralPromptCacheConfig(),
-                properties=JinjaPromptBlockProperties(
-                    template="string",
-                    template_type="STRING",
-                ),
+                template="string",
+                template_type="STRING",
             )
         ],
         expand_meta=AdHocExpandMeta(
@@ -275,7 +272,6 @@ class AsyncAdHocClient:
            AsyncVellum,
            EphemeralPromptCacheConfig,
            JinjaPromptBlock,
-           JinjaPromptBlockProperties,
            PromptParameters,
            PromptRequestStringInput,
            PromptSettings,
@@ -330,10 +326,8 @@ class AsyncAdHocClient:
             JinjaPromptBlock(
                 state="ENABLED",
                 cache_config=EphemeralPromptCacheConfig(),
-                properties=JinjaPromptBlockProperties(
-                    template="string",
-                    template_type="STRING",
-                ),
+                template="string",
+                template_type="STRING",
             )
         ],
         expand_meta=AdHocExpandMeta(
vellum/types/__init__.py
CHANGED
@@ -45,7 +45,6 @@ from .chat_message import ChatMessage
 from .chat_message_content import ChatMessageContent
 from .chat_message_content_request import ChatMessageContentRequest
 from .chat_message_prompt_block import ChatMessagePromptBlock
-from .chat_message_prompt_block_properties import ChatMessagePromptBlockProperties
 from .chat_message_request import ChatMessageRequest
 from .chat_message_role import ChatMessageRole
 from .code_execution_node_array_result import CodeExecutionNodeArrayResult
@@ -142,7 +141,6 @@ from .function_call_variable_value import FunctionCallVariableValue
 from .function_call_vellum_value import FunctionCallVellumValue
 from .function_call_vellum_value_request import FunctionCallVellumValueRequest
 from .function_definition_prompt_block import FunctionDefinitionPromptBlock
-from .function_definition_prompt_block_properties import FunctionDefinitionPromptBlockProperties
 from .generate_options_request import GenerateOptionsRequest
 from .generate_request import GenerateRequest
 from .generate_response import GenerateResponse
@@ -180,7 +178,6 @@ from .instructor_vectorizer_config import InstructorVectorizerConfig
 from .instructor_vectorizer_config_request import InstructorVectorizerConfigRequest
 from .iteration_state_enum import IterationStateEnum
 from .jinja_prompt_block import JinjaPromptBlock
-from .jinja_prompt_block_properties import JinjaPromptBlockProperties
 from .json_input_request import JsonInputRequest
 from .json_variable_value import JsonVariableValue
 from .json_vellum_value import JsonVellumValue
@@ -534,7 +531,6 @@ __all__ = [
     "ChatMessageContent",
     "ChatMessageContentRequest",
     "ChatMessagePromptBlock",
-    "ChatMessagePromptBlockProperties",
     "ChatMessageRequest",
     "ChatMessageRole",
     "CodeExecutionNodeArrayResult",
@@ -631,7 +627,6 @@ __all__ = [
     "FunctionCallVellumValue",
     "FunctionCallVellumValueRequest",
     "FunctionDefinitionPromptBlock",
-    "FunctionDefinitionPromptBlockProperties",
     "GenerateOptionsRequest",
     "GenerateRequest",
     "GenerateResponse",
@@ -665,7 +660,6 @@ __all__ = [
     "InstructorVectorizerConfigRequest",
     "IterationStateEnum",
     "JinjaPromptBlock",
-    "JinjaPromptBlockProperties",
     "JsonInputRequest",
     "JsonVariableValue",
     "JsonVellumValue",
vellum/types/chat_message_prompt_block.py
CHANGED
@@ -5,6 +5,7 @@ from ..core.pydantic_utilities import UniversalBaseModel
 import typing
 from .prompt_block_state import PromptBlockState
 from .ephemeral_prompt_cache_config import EphemeralPromptCacheConfig
+from .chat_message_role import ChatMessageRole
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 import pydantic
 from ..core.pydantic_utilities import update_forward_refs
@@ -18,7 +19,10 @@ class ChatMessagePromptBlock(UniversalBaseModel):
     state: typing.Optional[PromptBlockState] = None
     cache_config: typing.Optional[EphemeralPromptCacheConfig] = None
     block_type: typing.Literal["CHAT_MESSAGE"] = "CHAT_MESSAGE"
-    properties: "ChatMessagePromptBlockProperties"
+    chat_role: ChatMessageRole
+    chat_source: typing.Optional[str] = None
+    chat_message_unterminated: typing.Optional[bool] = None
+    blocks: typing.List["PromptBlock"]

     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -30,7 +34,6 @@ class ChatMessagePromptBlock(UniversalBaseModel):
             extra = pydantic.Extra.allow


-from .chat_message_prompt_block_properties import ChatMessagePromptBlockProperties  # noqa: E402
+from .prompt_block import PromptBlock  # noqa: E402

-update_forward_refs(ChatMessagePromptBlockProperties, ChatMessagePromptBlock=ChatMessagePromptBlock)
 update_forward_refs(ChatMessagePromptBlock)
vellum/types/function_definition_prompt_block.py
CHANGED
@@ -4,7 +4,6 @@ from ..core.pydantic_utilities import UniversalBaseModel
 import typing
 from .prompt_block_state import PromptBlockState
 from .ephemeral_prompt_cache_config import EphemeralPromptCacheConfig
-from .function_definition_prompt_block_properties import FunctionDefinitionPromptBlockProperties
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 import pydantic

@@ -17,7 +16,11 @@ class FunctionDefinitionPromptBlock(UniversalBaseModel):
     state: typing.Optional[PromptBlockState] = None
     cache_config: typing.Optional[EphemeralPromptCacheConfig] = None
     block_type: typing.Literal["FUNCTION_DEFINITION"] = "FUNCTION_DEFINITION"
-    properties: FunctionDefinitionPromptBlockProperties
+    function_name: typing.Optional[str] = None
+    function_description: typing.Optional[str] = None
+    function_parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
+    function_forced: typing.Optional[bool] = None
+    function_strict: typing.Optional[bool] = None

     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
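The flattened FunctionDefinitionPromptBlock above exposes the former properties fields directly on the block. A hedged sketch of declaring a function tool against the 0.9.10 model; the field names and types mirror the diff, while the JSON-schema payload for function_parameters is an assumed example, not anything prescribed by the SDK:

from vellum import FunctionDefinitionPromptBlock

# All function_* fields are optional per the diff; the values here are illustrative.
weather_tool = FunctionDefinitionPromptBlock(
    function_name="get_current_weather",
    function_description="Look up the current weather for a given city.",
    function_parameters={
        "type": "object",
        "properties": {"city": {"type": "string"}},
        "required": ["city"],
    },
    function_forced=False,  # do not force the model to call this function
    function_strict=True,   # opt into strict schema decoding where available
)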
vellum/types/jinja_prompt_block.py
CHANGED
@@ -4,7 +4,7 @@ from ..core.pydantic_utilities import UniversalBaseModel
 import typing
 from .prompt_block_state import PromptBlockState
 from .ephemeral_prompt_cache_config import EphemeralPromptCacheConfig
-from .jinja_prompt_block_properties import JinjaPromptBlockProperties
+from .vellum_variable_type import VellumVariableType
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 import pydantic

@@ -17,7 +17,8 @@ class JinjaPromptBlock(UniversalBaseModel):
     state: typing.Optional[PromptBlockState] = None
     cache_config: typing.Optional[EphemeralPromptCacheConfig] = None
     block_type: typing.Literal["JINJA"] = "JINJA"
-    properties: JinjaPromptBlockProperties
+    template: str
+    template_type: VellumVariableType

     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
{vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/RECORD
CHANGED
@@ -1,8 +1,8 @@
-vellum/__init__.py,sha256=
+vellum/__init__.py,sha256=NEsCiACEkYsvptuTcfLFTiuhK3a17YvN0m-S6aHergA,34246
 vellum/client.py,sha256=kG4b9g1Jjm6zgzGBXCAYXcM_3xNQfBsa2Xut6F0eHQM,115201
 vellum/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/core/client_wrapper.py,sha256=
+vellum/core/client_wrapper.py,sha256=U6Ex7HIzvqWq868iJvr4NDoHi1-B3Uivw7LAjR1IK0o,1890
 vellum/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -32,7 +32,7 @@ vellum/lib/utils/uuid.py,sha256=nedyhTNQDS2YvrU5gL3PtvG9cgGH87yKOcpGDJAe44E,214
 vellum/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/resources/__init__.py,sha256=6tqe3AwLJGLW38iua0Tje0n3uz3a4vkqMFxbUJGRs98,1346
 vellum/resources/ad_hoc/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/resources/ad_hoc/client.py,sha256=
+vellum/resources/ad_hoc/client.py,sha256=YHFtDXXbWcwFBRi9iI-Q3EDhRFrPiKj3wOgMuWLOcjg,16199
 vellum/resources/container_images/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/resources/container_images/client.py,sha256=jK1n-NFsdBKCeEKh-EIqvw7R8AG9PP4GxxcoH9F0GYs,15463
 vellum/resources/deployments/__init__.py,sha256=m64MNuPx3qVazOnTNwOY8oEeDrAkNwMJvUEe5xoMDvs,239
@@ -82,7 +82,7 @@ vellum/terraform/ml_model/__init__.py,sha256=I8h1Ru-Rb-Hi_HusK6G7nJQZEKQGsAAHMmw
 vellum/terraform/provider/__init__.py,sha256=-06xKmAmknpohVzw5TD-t1bnUHta8OrQYqvMd04XM-U,12684
 vellum/terraform/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 vellum/terraform/versions.json,sha256=45c7jjRD5i4w9DJQHs5ZqLLVXRnQwP9Rirq3mWY-xEo,56
-vellum/types/__init__.py,sha256=
+vellum/types/__init__.py,sha256=fpJM6RPsqMqKJ0V7w4e4aab8jORBgSoAY4_FRqurz9s,51740
 vellum/types/ad_hoc_execute_prompt_event.py,sha256=bCjujA2XsOgyF3bRZbcEqV2rOIymRgsLoIRtZpB14xg,607
 vellum/types/ad_hoc_expand_meta.py,sha256=1gv-NCsy_6xBYupLvZH979yf2VMdxAU-l0y0ynMKZaw,1331
 vellum/types/ad_hoc_fulfilled_prompt_execution_meta.py,sha256=Bfvf1d_dkmshxRACVM5vcxbH_7AQY23RmrrnPc0ytYY,939
@@ -119,8 +119,7 @@ vellum/types/chat_history_vellum_value_request.py,sha256=HzAiysncG5unJ-tlb43HhGZ
 vellum/types/chat_message.py,sha256=EOA8v5Ebx2KS9BtwBBGbuvSK-pn4xWYZiioHuuPWvzw,916
 vellum/types/chat_message_content.py,sha256=DQLB5rG40qLRLsmKWWo-XKa4rhk9TGQs_eFTFow2zEM,607
 vellum/types/chat_message_content_request.py,sha256=iFT_PmN6sUjeN1_fZXr2ePJEbSq_GZcClBvtu8SdVmQ,724
-vellum/types/chat_message_prompt_block.py,sha256=
-vellum/types/chat_message_prompt_block_properties.py,sha256=_XukG09TPcw7kYiXHZrp9vdYAmVVtNwACiiddOql378,1268
+vellum/types/chat_message_prompt_block.py,sha256=K0D42GPIgJYfYYigXU6ggCUbwHcB1LwMNDjXDybh3MQ,1351
 vellum/types/chat_message_request.py,sha256=r2EW1pfnvNYx2fo6mBqU5HQrUzp67WXuE5G-XK281E4,945
 vellum/types/chat_message_role.py,sha256=-i0Jrcbwf72MkMoaFTGyxRduvlN7f5Y9ULhCXR5KNdA,182
 vellum/types/code_execution_node_array_result.py,sha256=KCdbmjXjReO-hPPpBsSR17h_roDUpc4R-92cmIn59ck,952
@@ -216,8 +215,7 @@ vellum/types/function_call_request.py,sha256=eJBIN-wLkkkDUIwAy1nMeWHu3MZ5aJpOXyW
 vellum/types/function_call_variable_value.py,sha256=VQKCiEtJsmIK3i7CtFV_2ZpxeX70rqpUViXIvAci8L0,702
 vellum/types/function_call_vellum_value.py,sha256=lLJb-S_-S_UXm6una1BMyCbqLpMhbbMcaVIYNO45h5o,759
 vellum/types/function_call_vellum_value_request.py,sha256=oUteuCfWcj7UJbSE_Vywmmva9kyTaeL9iv5WJHabDVs,788
-vellum/types/function_definition_prompt_block.py,sha256=
-vellum/types/function_definition_prompt_block_properties.py,sha256=0D1yrSnCKQoJQlYu8Kk9AHItuc4LOIuz9LUoB4msTcw,1416
+vellum/types/function_definition_prompt_block.py,sha256=SB84MTRAXXaXaCMGA2O47tuXIk4wvDAOs5pUD5SJ6qg,1261
 vellum/types/generate_options_request.py,sha256=TUDqsH0tiPWDZH4T-p5gsvKvwVHEVZ_k6oI3qsjlsk4,782
 vellum/types/generate_request.py,sha256=gL6ywAJe6YCJ5oKbtYwL2H_TMdC_6PJZAI7-P3UOF3I,1286
 vellum/types/generate_response.py,sha256=QJmSRsYhZhtDmk2xpE9ueQEkHyXmYsaEQqqlKl9-bS4,699
@@ -250,8 +248,7 @@ vellum/types/initiated_workflow_node_result_event.py,sha256=Nu1J4iQYsW2HHjQFzQq-
 vellum/types/instructor_vectorizer_config.py,sha256=7udlosXv4CUWTW_Q9m0mz3VRi1FKSbBhDIOhtxRd0-U,731
 vellum/types/instructor_vectorizer_config_request.py,sha256=6LGFFQKntMfX7bdetUqEMVdr3KJHEps0oDp2bNmqWbM,738
 vellum/types/iteration_state_enum.py,sha256=83JSh842OJgQiLtNn1KMimy6RlEYRVH3mDmYWS6Ewzo,180
-vellum/types/jinja_prompt_block.py,sha256=
-vellum/types/jinja_prompt_block_properties.py,sha256=zC0AfT86xaLVVX8WkWLADoMonegv1jQvpked0DIhlEs,712
+vellum/types/jinja_prompt_block.py,sha256=eEoVmFeEjueNoNxrlUqmBtUhabQoOXna0NFHeqtBhNs,1030
 vellum/types/json_input_request.py,sha256=x5sA-VXxF4QH-98xRcIKPZhsMVbnJNUQofiUQqyfGk4,768
 vellum/types/json_variable_value.py,sha256=X7eBEWxuozfvIdqD5sIZ5L-L77Ou6IIsZaQVNXh5G2k,634
 vellum/types/json_vellum_value.py,sha256=8irlw6NkRRVafysfTc1Q5BFFhRrWJYzdwrDYTdJK4JY,689
@@ -563,7 +560,7 @@ vellum/types/workflow_result_event_output_data_string.py,sha256=tM3kgh6tEhD0dFEb
 vellum/types/workflow_stream_event.py,sha256=Wn3Yzuy9MqWAeo8tEaXDTKDEbJoA8DdYdMVq8EKuhu8,361
 vellum/types/workspace_secret_read.py,sha256=3CnHDG72IAY0KRNvc31F0xLmhnpwjQHnDYCfQJzCxI0,714
 vellum/version.py,sha256=jq-1PlAYxN9AXuaZqbYk9ak27SgE2lw9Ia5gx1b1gVI,76
-vellum_ai-0.9.8.dist-info/LICENSE,sha256=
-vellum_ai-0.9.8.dist-info/METADATA,sha256=
-vellum_ai-0.9.8.dist-info/WHEEL,sha256=
-vellum_ai-0.9.8.dist-info/RECORD,,
+vellum_ai-0.9.10.dist-info/LICENSE,sha256=CcaljEIoOBaU-wItPH4PmM_mDCGpyuUY0Er1BGu5Ti8,1073
+vellum_ai-0.9.10.dist-info/METADATA,sha256=enIPlqGDzfVkfkWynD62Swt-e1UKjSeonRXPCRv5buQ,4395
+vellum_ai-0.9.10.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+vellum_ai-0.9.10.dist-info/RECORD,,
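To confirm which of the two wheels is actually installed after an upgrade, the distribution version can be checked at runtime. A small sketch using only the standard library; vellum-ai is the distribution name used by the METADATA and RECORD entries above:

from importlib.metadata import version

# Prints the installed vellum-ai distribution version, e.g. "0.9.10" after upgrading.
print(version("vellum-ai"))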
vellum/types/chat_message_prompt_block_properties.py
DELETED
@@ -1,36 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from __future__ import annotations
-from ..core.pydantic_utilities import UniversalBaseModel
-import typing
-from .chat_message_role import ChatMessageRole
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-from ..core.pydantic_utilities import update_forward_refs
-
-
-class ChatMessagePromptBlockProperties(UniversalBaseModel):
-    """
-    The properties of a ChatMessagePromptTemplateBlock
-    """
-
-    blocks: typing.List["PromptBlock"]
-    chat_role: typing.Optional[ChatMessageRole] = None
-    chat_source: typing.Optional[str] = None
-    chat_message_unterminated: typing.Optional[bool] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-from .chat_message_prompt_block import ChatMessagePromptBlock  # noqa: E402
-from .prompt_block import PromptBlock  # noqa: E402
-
-update_forward_refs(ChatMessagePromptBlock, ChatMessagePromptBlockProperties=ChatMessagePromptBlockProperties)
-update_forward_refs(ChatMessagePromptBlockProperties)
vellum/types/function_definition_prompt_block_properties.py
DELETED
@@ -1,42 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.pydantic_utilities import UniversalBaseModel
-import typing
-import pydantic
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-
-
-class FunctionDefinitionPromptBlockProperties(UniversalBaseModel):
-    function_name: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    The name identifying the function.
-    """
-
-    function_description: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    A description to help guide the model when to invoke this function.
-    """
-
-    function_parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
-    """
-    An OpenAPI specification of parameters that are supported by this function.
-    """
-
-    function_forced: typing.Optional[bool] = pydantic.Field(default=None)
-    """
-    Set this option to true to force the model to return a function call of this function.
-    """
-
-    function_strict: typing.Optional[bool] = pydantic.Field(default=None)
-    """
-    Set this option to use strict schema decoding when available.
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
vellum/types/jinja_prompt_block_properties.py
DELETED
@@ -1,21 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.pydantic_utilities import UniversalBaseModel
-import typing
-from .vellum_variable_type import VellumVariableType
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class JinjaPromptBlockProperties(UniversalBaseModel):
-    template: typing.Optional[str] = None
-    template_type: typing.Optional[VellumVariableType] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
{vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/LICENSE
File without changes
{vellum_ai-0.9.8.dist-info → vellum_ai-0.9.10.dist-info}/WHEEL
File without changes