vellum-ai 0.9.6__py3-none-any.whl → 0.9.8__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (32)
  1. vellum/__init__.py +38 -42
  2. vellum/core/client_wrapper.py +1 -1
  3. vellum/resources/ad_hoc/client.py +86 -90
  4. vellum/types/__init__.py +38 -42
  5. vellum/types/{ad_hoc_expand_meta_request.py → ad_hoc_expand_meta.py} +1 -1
  6. vellum/types/{chat_message_prompt_block_request.py → chat_message_prompt_block.py} +7 -9
  7. vellum/types/{chat_message_prompt_block_properties_request.py → chat_message_prompt_block_properties.py} +6 -8
  8. vellum/types/{ephemeral_prompt_cache_config_request.py → ephemeral_prompt_cache_config.py} +1 -1
  9. vellum/types/{function_definition_prompt_block_request.py → function_definition_prompt_block.py} +5 -5
  10. vellum/types/{function_definition_prompt_block_properties_request.py → function_definition_prompt_block_properties.py} +1 -1
  11. vellum/types/{jinja_prompt_block_request.py → jinja_prompt_block.py} +5 -5
  12. vellum/types/{jinja_prompt_block_properties_request.py → jinja_prompt_block_properties.py} +1 -1
  13. vellum/types/{plain_text_prompt_block_request.py → plain_text_prompt_block.py} +3 -3
  14. vellum/types/prompt_block.py +15 -0
  15. vellum/types/{prompt_parameters_request.py → prompt_parameters.py} +1 -1
  16. vellum/types/{prompt_request_chat_history_input_request.py → prompt_request_chat_history_input.py} +3 -3
  17. vellum/types/prompt_request_input.py +8 -0
  18. vellum/types/{prompt_request_json_input_request.py → prompt_request_json_input.py} +1 -1
  19. vellum/types/{prompt_request_string_input_request.py → prompt_request_string_input.py} +1 -1
  20. vellum/types/{prompt_settings_request.py → prompt_settings.py} +1 -1
  21. vellum/types/rich_text_child_block.py +7 -0
  22. vellum/types/{rich_text_prompt_block_request.py → rich_text_prompt_block.py} +6 -7
  23. vellum/types/{variable_prompt_block_request.py → variable_prompt_block.py} +3 -3
  24. {vellum_ai-0.9.6.dist-info → vellum_ai-0.9.8.dist-info}/METADATA +1 -1
  25. {vellum_ai-0.9.6.dist-info → vellum_ai-0.9.8.dist-info}/RECORD +27 -29
  26. vellum/types/prompt_block_request.py +0 -19
  27. vellum/types/prompt_request_input_request.py +0 -10
  28. vellum/types/rich_text_child_block_request.py +0 -7
  29. vellum/types/vellum_variable_extensions_request.py +0 -23
  30. vellum/types/vellum_variable_request.py +0 -33
  31. {vellum_ai-0.9.6.dist-info → vellum_ai-0.9.8.dist-info}/LICENSE +0 -0
  32. {vellum_ai-0.9.6.dist-info → vellum_ai-0.9.8.dist-info}/WHEEL +0 -0
vellum/__init__.py CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  from .types import (
4
4
  AdHocExecutePromptEvent,
5
- AdHocExpandMetaRequest,
5
+ AdHocExpandMeta,
6
6
  AdHocFulfilledPromptExecutionMeta,
7
7
  AdHocInitiatedPromptExecutionMeta,
8
8
  AdHocRejectedPromptExecutionMeta,
@@ -37,8 +37,8 @@ from .types import (
37
37
  ChatMessage,
38
38
  ChatMessageContent,
39
39
  ChatMessageContentRequest,
40
- ChatMessagePromptBlockPropertiesRequest,
41
- ChatMessagePromptBlockRequest,
40
+ ChatMessagePromptBlock,
41
+ ChatMessagePromptBlockProperties,
42
42
  ChatMessageRequest,
43
43
  ChatMessageRole,
44
44
  CodeExecutionNodeArrayResult,
@@ -84,7 +84,7 @@ from .types import (
84
84
  EntityStatus,
85
85
  EntityVisibility,
86
86
  EnvironmentEnum,
87
- EphemeralPromptCacheConfigRequest,
87
+ EphemeralPromptCacheConfig,
88
88
  EphemeralPromptCacheConfigTypeEnum,
89
89
  ErrorInputRequest,
90
90
  ErrorVariableValue,
@@ -134,8 +134,8 @@ from .types import (
134
134
  FunctionCallVariableValue,
135
135
  FunctionCallVellumValue,
136
136
  FunctionCallVellumValueRequest,
137
- FunctionDefinitionPromptBlockPropertiesRequest,
138
- FunctionDefinitionPromptBlockRequest,
137
+ FunctionDefinitionPromptBlock,
138
+ FunctionDefinitionPromptBlockProperties,
139
139
  GenerateOptionsRequest,
140
140
  GenerateRequest,
141
141
  GenerateResponse,
@@ -168,8 +168,8 @@ from .types import (
168
168
  InstructorVectorizerConfig,
169
169
  InstructorVectorizerConfigRequest,
170
170
  IterationStateEnum,
171
- JinjaPromptBlockPropertiesRequest,
172
- JinjaPromptBlockRequest,
171
+ JinjaPromptBlock,
172
+ JinjaPromptBlockProperties,
173
173
  JsonInputRequest,
174
174
  JsonVariableValue,
175
175
  JsonVellumValue,
@@ -254,11 +254,11 @@ from .types import (
254
254
  PaginatedWorkflowReleaseTagReadList,
255
255
  PdfSearchResultMetaSource,
256
256
  PdfSearchResultMetaSourceRequest,
257
- PlainTextPromptBlockRequest,
257
+ PlainTextPromptBlock,
258
258
  Price,
259
259
  ProcessingFailureReasonEnum,
260
260
  ProcessingStateEnum,
261
- PromptBlockRequest,
261
+ PromptBlock,
262
262
  PromptBlockState,
263
263
  PromptDeploymentExpandMetaRequest,
264
264
  PromptDeploymentInputRequest,
@@ -267,12 +267,12 @@ from .types import (
267
267
  PromptNodeResult,
268
268
  PromptNodeResultData,
269
269
  PromptOutput,
270
- PromptParametersRequest,
271
- PromptRequestChatHistoryInputRequest,
272
- PromptRequestInputRequest,
273
- PromptRequestJsonInputRequest,
274
- PromptRequestStringInputRequest,
275
- PromptSettingsRequest,
270
+ PromptParameters,
271
+ PromptRequestChatHistoryInput,
272
+ PromptRequestInput,
273
+ PromptRequestJsonInput,
274
+ PromptRequestStringInput,
275
+ PromptSettings,
276
276
  RawPromptExecutionOverridesRequest,
277
277
  ReductoChunkerConfig,
278
278
  ReductoChunkerConfigRequest,
@@ -286,8 +286,8 @@ from .types import (
286
286
  RejectedWorkflowNodeResultEvent,
287
287
  ReleaseTagSource,
288
288
  ReplaceTestSuiteTestCaseRequest,
289
- RichTextChildBlockRequest,
290
- RichTextPromptBlockRequest,
289
+ RichTextChildBlock,
290
+ RichTextPromptBlock,
291
291
  SandboxScenario,
292
292
  ScenarioInput,
293
293
  ScenarioInputChatHistoryVariableValue,
@@ -419,7 +419,7 @@ from .types import (
419
419
  UnitEnum,
420
420
  UploadDocumentResponse,
421
421
  UpsertTestSuiteTestCaseRequest,
422
- VariablePromptBlockRequest,
422
+ VariablePromptBlock,
423
423
  VellumAudio,
424
424
  VellumAudioRequest,
425
425
  VellumError,
@@ -434,8 +434,6 @@ from .types import (
434
434
  VellumValueRequest,
435
435
  VellumVariable,
436
436
  VellumVariableExtensions,
437
- VellumVariableExtensionsRequest,
438
- VellumVariableRequest,
439
437
  VellumVariableType,
440
438
  WorkflowDeploymentRead,
441
439
  WorkflowEventError,
@@ -513,7 +511,7 @@ from .version import __version__
513
511
 
514
512
  __all__ = [
515
513
  "AdHocExecutePromptEvent",
516
- "AdHocExpandMetaRequest",
514
+ "AdHocExpandMeta",
517
515
  "AdHocFulfilledPromptExecutionMeta",
518
516
  "AdHocInitiatedPromptExecutionMeta",
519
517
  "AdHocRejectedPromptExecutionMeta",
@@ -550,8 +548,8 @@ __all__ = [
550
548
  "ChatMessage",
551
549
  "ChatMessageContent",
552
550
  "ChatMessageContentRequest",
553
- "ChatMessagePromptBlockPropertiesRequest",
554
- "ChatMessagePromptBlockRequest",
551
+ "ChatMessagePromptBlock",
552
+ "ChatMessagePromptBlockProperties",
555
553
  "ChatMessageRequest",
556
554
  "ChatMessageRole",
557
555
  "CodeExecutionNodeArrayResult",
@@ -599,7 +597,7 @@ __all__ = [
599
597
  "EntityStatus",
600
598
  "EntityVisibility",
601
599
  "EnvironmentEnum",
602
- "EphemeralPromptCacheConfigRequest",
600
+ "EphemeralPromptCacheConfig",
603
601
  "EphemeralPromptCacheConfigTypeEnum",
604
602
  "ErrorInputRequest",
605
603
  "ErrorVariableValue",
@@ -651,8 +649,8 @@ __all__ = [
651
649
  "FunctionCallVariableValue",
652
650
  "FunctionCallVellumValue",
653
651
  "FunctionCallVellumValueRequest",
654
- "FunctionDefinitionPromptBlockPropertiesRequest",
655
- "FunctionDefinitionPromptBlockRequest",
652
+ "FunctionDefinitionPromptBlock",
653
+ "FunctionDefinitionPromptBlockProperties",
656
654
  "GenerateOptionsRequest",
657
655
  "GenerateRequest",
658
656
  "GenerateResponse",
@@ -686,8 +684,8 @@ __all__ = [
686
684
  "InstructorVectorizerConfigRequest",
687
685
  "InternalServerError",
688
686
  "IterationStateEnum",
689
- "JinjaPromptBlockPropertiesRequest",
690
- "JinjaPromptBlockRequest",
687
+ "JinjaPromptBlock",
688
+ "JinjaPromptBlockProperties",
691
689
  "JsonInputRequest",
692
690
  "JsonVariableValue",
693
691
  "JsonVellumValue",
@@ -775,11 +773,11 @@ __all__ = [
775
773
  "PaginatedWorkflowReleaseTagReadList",
776
774
  "PdfSearchResultMetaSource",
777
775
  "PdfSearchResultMetaSourceRequest",
778
- "PlainTextPromptBlockRequest",
776
+ "PlainTextPromptBlock",
779
777
  "Price",
780
778
  "ProcessingFailureReasonEnum",
781
779
  "ProcessingStateEnum",
782
- "PromptBlockRequest",
780
+ "PromptBlock",
783
781
  "PromptBlockState",
784
782
  "PromptDeploymentExpandMetaRequest",
785
783
  "PromptDeploymentInputRequest",
@@ -788,12 +786,12 @@ __all__ = [
788
786
  "PromptNodeResult",
789
787
  "PromptNodeResultData",
790
788
  "PromptOutput",
791
- "PromptParametersRequest",
792
- "PromptRequestChatHistoryInputRequest",
793
- "PromptRequestInputRequest",
794
- "PromptRequestJsonInputRequest",
795
- "PromptRequestStringInputRequest",
796
- "PromptSettingsRequest",
789
+ "PromptParameters",
790
+ "PromptRequestChatHistoryInput",
791
+ "PromptRequestInput",
792
+ "PromptRequestJsonInput",
793
+ "PromptRequestStringInput",
794
+ "PromptSettings",
797
795
  "RawPromptExecutionOverridesRequest",
798
796
  "ReductoChunkerConfig",
799
797
  "ReductoChunkerConfigRequest",
@@ -807,8 +805,8 @@ __all__ = [
807
805
  "RejectedWorkflowNodeResultEvent",
808
806
  "ReleaseTagSource",
809
807
  "ReplaceTestSuiteTestCaseRequest",
810
- "RichTextChildBlockRequest",
811
- "RichTextPromptBlockRequest",
808
+ "RichTextChildBlock",
809
+ "RichTextPromptBlock",
812
810
  "SandboxScenario",
813
811
  "ScenarioInput",
814
812
  "ScenarioInputChatHistoryVariableValue",
@@ -940,7 +938,7 @@ __all__ = [
940
938
  "UnitEnum",
941
939
  "UploadDocumentResponse",
942
940
  "UpsertTestSuiteTestCaseRequest",
943
- "VariablePromptBlockRequest",
941
+ "VariablePromptBlock",
944
942
  "Vellum",
945
943
  "VellumAudio",
946
944
  "VellumAudioRequest",
@@ -957,8 +955,6 @@ __all__ = [
957
955
  "VellumValueRequest",
958
956
  "VellumVariable",
959
957
  "VellumVariableExtensions",
960
- "VellumVariableExtensionsRequest",
961
- "VellumVariableRequest",
962
958
  "VellumVariableType",
963
959
  "WorkflowDeploymentRead",
964
960
  "WorkflowDeploymentsListRequestStatus",
@@ -17,7 +17,7 @@ class BaseClientWrapper:
17
17
  headers: typing.Dict[str, str] = {
18
18
  "X-Fern-Language": "Python",
19
19
  "X-Fern-SDK-Name": "vellum-ai",
20
- "X-Fern-SDK-Version": "0.9.6",
20
+ "X-Fern-SDK-Version": "0.9.8",
21
21
  }
22
22
  headers["X_API_KEY"] = self.api_key
23
23
  return headers
@@ -2,12 +2,12 @@
2
2
 
3
3
  import typing
4
4
  from ...core.client_wrapper import SyncClientWrapper
5
- from ...types.prompt_request_input_request import PromptRequestInputRequest
6
- from ...types.vellum_variable_request import VellumVariableRequest
7
- from ...types.prompt_parameters_request import PromptParametersRequest
8
- from ...types.prompt_block_request import PromptBlockRequest
9
- from ...types.prompt_settings_request import PromptSettingsRequest
10
- from ...types.ad_hoc_expand_meta_request import AdHocExpandMetaRequest
5
+ from ...types.prompt_request_input import PromptRequestInput
6
+ from ...types.vellum_variable import VellumVariable
7
+ from ...types.prompt_parameters import PromptParameters
8
+ from ...types.prompt_block import PromptBlock
9
+ from ...types.prompt_settings import PromptSettings
10
+ from ...types.ad_hoc_expand_meta import AdHocExpandMeta
11
11
  from ...core.request_options import RequestOptions
12
12
  from ...types.ad_hoc_execute_prompt_event import AdHocExecutePromptEvent
13
13
  from ...core.serialization import convert_and_respect_annotation_metadata
@@ -32,12 +32,12 @@ class AdHocClient:
32
32
  self,
33
33
  *,
34
34
  ml_model: str,
35
- input_values: typing.Sequence[PromptRequestInputRequest],
36
- input_variables: typing.Sequence[VellumVariableRequest],
37
- parameters: PromptParametersRequest,
38
- blocks: typing.Sequence[PromptBlockRequest],
39
- settings: typing.Optional[PromptSettingsRequest] = OMIT,
40
- expand_meta: typing.Optional[AdHocExpandMetaRequest] = OMIT,
35
+ input_values: typing.Sequence[PromptRequestInput],
36
+ input_variables: typing.Sequence[VellumVariable],
37
+ parameters: PromptParameters,
38
+ blocks: typing.Sequence[PromptBlock],
39
+ settings: typing.Optional[PromptSettings] = OMIT,
40
+ expand_meta: typing.Optional[AdHocExpandMeta] = OMIT,
41
41
  request_options: typing.Optional[RequestOptions] = None,
42
42
  ) -> typing.Iterator[AdHocExecutePromptEvent]:
43
43
  """
@@ -47,17 +47,17 @@ class AdHocClient:
47
47
  ----------
48
48
  ml_model : str
49
49
 
50
- input_values : typing.Sequence[PromptRequestInputRequest]
50
+ input_values : typing.Sequence[PromptRequestInput]
51
51
 
52
- input_variables : typing.Sequence[VellumVariableRequest]
52
+ input_variables : typing.Sequence[VellumVariable]
53
53
 
54
- parameters : PromptParametersRequest
54
+ parameters : PromptParameters
55
55
 
56
- blocks : typing.Sequence[PromptBlockRequest]
56
+ blocks : typing.Sequence[PromptBlock]
57
57
 
58
- settings : typing.Optional[PromptSettingsRequest]
58
+ settings : typing.Optional[PromptSettings]
59
59
 
60
- expand_meta : typing.Optional[AdHocExpandMetaRequest]
60
+ expand_meta : typing.Optional[AdHocExpandMeta]
61
61
 
62
62
  request_options : typing.Optional[RequestOptions]
63
63
  Request-specific configuration.
@@ -70,17 +70,17 @@ class AdHocClient:
70
70
  Examples
71
71
  --------
72
72
  from vellum import (
73
- AdHocExpandMetaRequest,
74
- EphemeralPromptCacheConfigRequest,
75
- JinjaPromptBlockPropertiesRequest,
76
- JinjaPromptBlockRequest,
77
- PromptParametersRequest,
78
- PromptRequestStringInputRequest,
79
- PromptSettingsRequest,
80
- StringVellumValueRequest,
73
+ AdHocExpandMeta,
74
+ EphemeralPromptCacheConfig,
75
+ JinjaPromptBlock,
76
+ JinjaPromptBlockProperties,
77
+ PromptParameters,
78
+ PromptRequestStringInput,
79
+ PromptSettings,
80
+ StringVellumValue,
81
81
  Vellum,
82
- VellumVariableExtensionsRequest,
83
- VellumVariableRequest,
82
+ VellumVariable,
83
+ VellumVariableExtensions,
84
84
  )
85
85
 
86
86
  client = Vellum(
@@ -89,26 +89,26 @@ class AdHocClient:
89
89
  response = client.ad_hoc.adhoc_execute_prompt_stream(
90
90
  ml_model="string",
91
91
  input_values=[
92
- PromptRequestStringInputRequest(
92
+ PromptRequestStringInput(
93
93
  key="string",
94
94
  value="string",
95
95
  )
96
96
  ],
97
97
  input_variables=[
98
- VellumVariableRequest(
98
+ VellumVariable(
99
99
  id="string",
100
100
  key="string",
101
101
  type="STRING",
102
102
  required=True,
103
- default=StringVellumValueRequest(
104
- value="string",
103
+ default=StringVellumValue(
104
+ value={"key": "value"},
105
105
  ),
106
- extensions=VellumVariableExtensionsRequest(
107
- color="string",
106
+ extensions=VellumVariableExtensions(
107
+ color={"key": "value"},
108
108
  ),
109
109
  )
110
110
  ],
111
- parameters=PromptParametersRequest(
111
+ parameters=PromptParameters(
112
112
  stop=["string"],
113
113
  temperature=1.1,
114
114
  max_tokens=1,
@@ -119,22 +119,20 @@ class AdHocClient:
119
119
  logit_bias={"string": {"key": "value"}},
120
120
  custom_parameters={"string": {"key": "value"}},
121
121
  ),
122
- settings=PromptSettingsRequest(
122
+ settings=PromptSettings(
123
123
  timeout=1.1,
124
124
  ),
125
125
  blocks=[
126
- JinjaPromptBlockRequest(
126
+ JinjaPromptBlock(
127
127
  state="ENABLED",
128
- cache_config=EphemeralPromptCacheConfigRequest(
129
- type={"key": "value"},
130
- ),
131
- properties=JinjaPromptBlockPropertiesRequest(
128
+ cache_config=EphemeralPromptCacheConfig(),
129
+ properties=JinjaPromptBlockProperties(
132
130
  template="string",
133
131
  template_type="STRING",
134
132
  ),
135
133
  )
136
134
  ],
137
- expand_meta=AdHocExpandMetaRequest(
135
+ expand_meta=AdHocExpandMeta(
138
136
  cost=True,
139
137
  model_name=True,
140
138
  usage=True,
@@ -151,22 +149,22 @@ class AdHocClient:
151
149
  json={
152
150
  "ml_model": ml_model,
153
151
  "input_values": convert_and_respect_annotation_metadata(
154
- object_=input_values, annotation=typing.Sequence[PromptRequestInputRequest], direction="write"
152
+ object_=input_values, annotation=typing.Sequence[PromptRequestInput], direction="write"
155
153
  ),
156
154
  "input_variables": convert_and_respect_annotation_metadata(
157
- object_=input_variables, annotation=typing.Sequence[VellumVariableRequest], direction="write"
155
+ object_=input_variables, annotation=typing.Sequence[VellumVariable], direction="write"
158
156
  ),
159
157
  "parameters": convert_and_respect_annotation_metadata(
160
- object_=parameters, annotation=PromptParametersRequest, direction="write"
158
+ object_=parameters, annotation=PromptParameters, direction="write"
161
159
  ),
162
160
  "settings": convert_and_respect_annotation_metadata(
163
- object_=settings, annotation=PromptSettingsRequest, direction="write"
161
+ object_=settings, annotation=PromptSettings, direction="write"
164
162
  ),
165
163
  "blocks": convert_and_respect_annotation_metadata(
166
- object_=blocks, annotation=typing.Sequence[PromptBlockRequest], direction="write"
164
+ object_=blocks, annotation=typing.Sequence[PromptBlock], direction="write"
167
165
  ),
168
166
  "expand_meta": convert_and_respect_annotation_metadata(
169
- object_=expand_meta, annotation=AdHocExpandMetaRequest, direction="write"
167
+ object_=expand_meta, annotation=AdHocExpandMeta, direction="write"
170
168
  ),
171
169
  },
172
170
  request_options=request_options,
@@ -233,12 +231,12 @@ class AsyncAdHocClient:
233
231
  self,
234
232
  *,
235
233
  ml_model: str,
236
- input_values: typing.Sequence[PromptRequestInputRequest],
237
- input_variables: typing.Sequence[VellumVariableRequest],
238
- parameters: PromptParametersRequest,
239
- blocks: typing.Sequence[PromptBlockRequest],
240
- settings: typing.Optional[PromptSettingsRequest] = OMIT,
241
- expand_meta: typing.Optional[AdHocExpandMetaRequest] = OMIT,
234
+ input_values: typing.Sequence[PromptRequestInput],
235
+ input_variables: typing.Sequence[VellumVariable],
236
+ parameters: PromptParameters,
237
+ blocks: typing.Sequence[PromptBlock],
238
+ settings: typing.Optional[PromptSettings] = OMIT,
239
+ expand_meta: typing.Optional[AdHocExpandMeta] = OMIT,
242
240
  request_options: typing.Optional[RequestOptions] = None,
243
241
  ) -> typing.AsyncIterator[AdHocExecutePromptEvent]:
244
242
  """
@@ -248,17 +246,17 @@ class AsyncAdHocClient:
248
246
  ----------
249
247
  ml_model : str
250
248
 
251
- input_values : typing.Sequence[PromptRequestInputRequest]
249
+ input_values : typing.Sequence[PromptRequestInput]
252
250
 
253
- input_variables : typing.Sequence[VellumVariableRequest]
251
+ input_variables : typing.Sequence[VellumVariable]
254
252
 
255
- parameters : PromptParametersRequest
253
+ parameters : PromptParameters
256
254
 
257
- blocks : typing.Sequence[PromptBlockRequest]
255
+ blocks : typing.Sequence[PromptBlock]
258
256
 
259
- settings : typing.Optional[PromptSettingsRequest]
257
+ settings : typing.Optional[PromptSettings]
260
258
 
261
- expand_meta : typing.Optional[AdHocExpandMetaRequest]
259
+ expand_meta : typing.Optional[AdHocExpandMeta]
262
260
 
263
261
  request_options : typing.Optional[RequestOptions]
264
262
  Request-specific configuration.
@@ -273,17 +271,17 @@ class AsyncAdHocClient:
273
271
  import asyncio
274
272
 
275
273
  from vellum import (
276
- AdHocExpandMetaRequest,
274
+ AdHocExpandMeta,
277
275
  AsyncVellum,
278
- EphemeralPromptCacheConfigRequest,
279
- JinjaPromptBlockPropertiesRequest,
280
- JinjaPromptBlockRequest,
281
- PromptParametersRequest,
282
- PromptRequestStringInputRequest,
283
- PromptSettingsRequest,
284
- StringVellumValueRequest,
285
- VellumVariableExtensionsRequest,
286
- VellumVariableRequest,
276
+ EphemeralPromptCacheConfig,
277
+ JinjaPromptBlock,
278
+ JinjaPromptBlockProperties,
279
+ PromptParameters,
280
+ PromptRequestStringInput,
281
+ PromptSettings,
282
+ StringVellumValue,
283
+ VellumVariable,
284
+ VellumVariableExtensions,
287
285
  )
288
286
 
289
287
  client = AsyncVellum(
@@ -295,26 +293,26 @@ class AsyncAdHocClient:
295
293
  response = await client.ad_hoc.adhoc_execute_prompt_stream(
296
294
  ml_model="string",
297
295
  input_values=[
298
- PromptRequestStringInputRequest(
296
+ PromptRequestStringInput(
299
297
  key="string",
300
298
  value="string",
301
299
  )
302
300
  ],
303
301
  input_variables=[
304
- VellumVariableRequest(
302
+ VellumVariable(
305
303
  id="string",
306
304
  key="string",
307
305
  type="STRING",
308
306
  required=True,
309
- default=StringVellumValueRequest(
310
- value="string",
307
+ default=StringVellumValue(
308
+ value={"key": "value"},
311
309
  ),
312
- extensions=VellumVariableExtensionsRequest(
313
- color="string",
310
+ extensions=VellumVariableExtensions(
311
+ color={"key": "value"},
314
312
  ),
315
313
  )
316
314
  ],
317
- parameters=PromptParametersRequest(
315
+ parameters=PromptParameters(
318
316
  stop=["string"],
319
317
  temperature=1.1,
320
318
  max_tokens=1,
@@ -325,22 +323,20 @@ class AsyncAdHocClient:
325
323
  logit_bias={"string": {"key": "value"}},
326
324
  custom_parameters={"string": {"key": "value"}},
327
325
  ),
328
- settings=PromptSettingsRequest(
326
+ settings=PromptSettings(
329
327
  timeout=1.1,
330
328
  ),
331
329
  blocks=[
332
- JinjaPromptBlockRequest(
330
+ JinjaPromptBlock(
333
331
  state="ENABLED",
334
- cache_config=EphemeralPromptCacheConfigRequest(
335
- type={"key": "value"},
336
- ),
337
- properties=JinjaPromptBlockPropertiesRequest(
332
+ cache_config=EphemeralPromptCacheConfig(),
333
+ properties=JinjaPromptBlockProperties(
338
334
  template="string",
339
335
  template_type="STRING",
340
336
  ),
341
337
  )
342
338
  ],
343
- expand_meta=AdHocExpandMetaRequest(
339
+ expand_meta=AdHocExpandMeta(
344
340
  cost=True,
345
341
  model_name=True,
346
342
  usage=True,
@@ -360,22 +356,22 @@ class AsyncAdHocClient:
360
356
  json={
361
357
  "ml_model": ml_model,
362
358
  "input_values": convert_and_respect_annotation_metadata(
363
- object_=input_values, annotation=typing.Sequence[PromptRequestInputRequest], direction="write"
359
+ object_=input_values, annotation=typing.Sequence[PromptRequestInput], direction="write"
364
360
  ),
365
361
  "input_variables": convert_and_respect_annotation_metadata(
366
- object_=input_variables, annotation=typing.Sequence[VellumVariableRequest], direction="write"
362
+ object_=input_variables, annotation=typing.Sequence[VellumVariable], direction="write"
367
363
  ),
368
364
  "parameters": convert_and_respect_annotation_metadata(
369
- object_=parameters, annotation=PromptParametersRequest, direction="write"
365
+ object_=parameters, annotation=PromptParameters, direction="write"
370
366
  ),
371
367
  "settings": convert_and_respect_annotation_metadata(
372
- object_=settings, annotation=PromptSettingsRequest, direction="write"
368
+ object_=settings, annotation=PromptSettings, direction="write"
373
369
  ),
374
370
  "blocks": convert_and_respect_annotation_metadata(
375
- object_=blocks, annotation=typing.Sequence[PromptBlockRequest], direction="write"
371
+ object_=blocks, annotation=typing.Sequence[PromptBlock], direction="write"
376
372
  ),
377
373
  "expand_meta": convert_and_respect_annotation_metadata(
378
- object_=expand_meta, annotation=AdHocExpandMetaRequest, direction="write"
374
+ object_=expand_meta, annotation=AdHocExpandMeta, direction="write"
379
375
  ),
380
376
  },
381
377
  request_options=request_options,