vellum-ai 0.1.9__py3-none-any.whl → 0.1.10__py3-none-any.whl
vellum/client.py CHANGED
@@ -86,6 +86,9 @@ class Vellum:
         """
         Executes a deployed Prompt and returns the result.
 
+        Note: This endpoint temporarily does not support prompts with function calling, support is coming soon.
+        In the meantime, we recommend still using the `/generate` endpoint for prompts with function calling.
+
         Parameters:
         - inputs: typing.List[PromptDeploymentInputRequest].
 
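The new docstring above steers function-calling prompts back to the generate API for now. The snippet below is a minimal sketch of that fallback, not part of this diff: the client construction, the generate/GenerateRequest names and parameters are assumptions about the 0.1.x SDK surface, and the deployment name and inputs are hypothetical.

# Sketch only: route a function-calling prompt through the generate API,
# as the new docstring recommends. Names below are assumptions about the
# 0.1.x SDK, not confirmed by this changelog.
from vellum import GenerateRequest
from vellum.client import Vellum

client = Vellum(api_key="YOUR_VELLUM_API_KEY")  # placeholder credential

response = client.generate(
    deployment_name="function-calling-prompt",  # hypothetical deployment
    requests=[GenerateRequest(input_values={"query": "Book a table for two"})],
)
print(response)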
@@ -161,6 +164,9 @@ class Vellum:
         """
         Executes a deployed Prompt and streams back the results.
 
+        Note: This endpoint temporarily does not support prompts with function calling, support is coming soon.
+        In the meantime, we recommend still using the `/generate-stream` endpoint for prompts with function calling
+
         Parameters:
         - inputs: typing.List[PromptDeploymentInputRequest].
 
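For the streaming path, the docstring likewise points at the generate-stream API. The sketch below assumes a generate_stream method that yields result chunks; that method name, its parameters, and the deployment used are assumptions about the 0.1.x SDK, not something this diff confirms.

# Sketch only: stream a function-calling prompt via the generate-stream API.
# generate_stream and its iterator return type are assumptions about the
# 0.1.x SDK.
from vellum import GenerateRequest
from vellum.client import Vellum

client = Vellum(api_key="YOUR_VELLUM_API_KEY")  # placeholder credential

for chunk in client.generate_stream(
    deployment_name="function-calling-prompt",  # hypothetical deployment
    requests=[GenerateRequest(input_values={"query": "What's on my calendar?"})],
):
    print(chunk)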
@@ -607,6 +613,9 @@ class AsyncVellum:
         """
         Executes a deployed Prompt and returns the result.
 
+        Note: This endpoint temporarily does not support prompts with function calling, support is coming soon.
+        In the meantime, we recommend still using the `/generate` endpoint for prompts with function calling.
+
         Parameters:
         - inputs: typing.List[PromptDeploymentInputRequest].
 
@@ -682,6 +691,9 @@ class AsyncVellum:
         """
         Executes a deployed Prompt and streams back the results.
 
+        Note: This endpoint temporarily does not support prompts with function calling, support is coming soon.
+        In the meantime, we recommend still using the `/generate-stream` endpoint for prompts with function calling
+
         Parameters:
         - inputs: typing.List[PromptDeploymentInputRequest].
 
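The same guidance applies to the async client. A sketch under the same assumptions as above, treating AsyncVellum.generate as the awaitable counterpart of the sync call; nothing here beyond the class names appears in the diff itself.

# Sketch only: async fallback for a function-calling prompt. AsyncVellum.generate
# being awaitable with these parameters is an assumption about the 0.1.x SDK.
import asyncio

from vellum import GenerateRequest
from vellum.client import AsyncVellum


async def main() -> None:
    client = AsyncVellum(api_key="YOUR_VELLUM_API_KEY")  # placeholder credential
    response = await client.generate(
        deployment_name="function-calling-prompt",  # hypothetical deployment
        requests=[GenerateRequest(input_values={"query": "Summarize my inbox"})],
    )
    print(response)


asyncio.run(main())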
vellum/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "v0.1.9",
+            "X-Fern-SDK-Version": "v0.1.10",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
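The only change here is the advertised SDK version. To make the header scheme concrete, below is an illustrative stand-in for the wrapper (not the SDK's actual module; the class name, constructor, and method name are invented for the example) showing the headers attached to every request, as in the diff above.

import typing


class ExampleClientWrapper:
    """Illustrative stand-in for BaseClientWrapper; not the SDK's real class."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key

    def get_headers(self) -> typing.Dict[str, str]:
        # Mirrors the diff above: SDK metadata headers plus the API key header.
        headers: typing.Dict[str, str] = {
            "X-Fern-Language": "Python",
            "X-Fern-SDK-Name": "vellum-ai",
            "X-Fern-SDK-Version": "v0.1.10",  # bumped from v0.1.9 in this release
        }
        headers["X_API_KEY"] = self.api_key
        return headers


print(ExampleClientWrapper("YOUR_VELLUM_API_KEY").get_headers())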
{vellum_ai-0.1.9.dist-info → vellum_ai-0.1.10.dist-info}/RECORD RENAMED
@@ -1,8 +1,8 @@
 vellum/__init__.py,sha256=p7CWPTlrwlGlE9tu6qL8gfoI4-amEuZ-mH3Rhe9U0gU,19047
-vellum/client.py,sha256=
+vellum/client.py,sha256=oz0l_KYFAQq3VI2uGHdAluuk3iac0c45XCdyRg055Bw,55245
 vellum/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
 vellum/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/core/client_wrapper.py,sha256=
+vellum/core/client_wrapper.py,sha256=rhGN6OMXQDU8yR-8_i7MymJI4s65NwQw5O3RonH3pC4,1214
 vellum/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/core/jsonable_encoder.py,sha256=MTYkDov2EryHgee4QM46uZiBOuOXK9KTHlBdBwU-CpU,3799
 vellum/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJJdrqCLEdowGw,330
@@ -229,6 +229,6 @@ vellum/types/workflow_result_event_output_data_number.py,sha256=zWtQor27iaDDHmY5
 vellum/types/workflow_result_event_output_data_search_results.py,sha256=frCaJ5kWrIqCeV-waBNfd7rO4fqWe5aYpSI8PM4-oRw,1323
 vellum/types/workflow_result_event_output_data_string.py,sha256=TByZxyQh9ci4UIdEmoEi_JK1U_JwYCnVZeB_4kGuXKM,1405
 vellum/types/workflow_stream_event.py,sha256=OQUSzwoM-OCfWxNzeOVVLsjCue_WWqin3tGMtwvp_rc,873
-vellum_ai-0.1.
-vellum_ai-0.1.
-vellum_ai-0.1.
+vellum_ai-0.1.10.dist-info/METADATA,sha256=FMHtByGy9cDhishaDiKO3lgp_dhTAaE74Loyb1vUlks,3487
+vellum_ai-0.1.10.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+vellum_ai-0.1.10.dist-info/RECORD,,
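Each RECORD row pairs an installed file with a sha256 digest (urlsafe base64, padding stripped, per the wheel spec) and a byte size, which is why the changed modules and the renamed dist-info entries get new rows here; the RECORD file's own row is left blank because it cannot contain its own hash. The small, package-independent sketch below shows how such a row can be recomputed by hand; the target path is hypothetical.

import base64
import hashlib
from pathlib import Path


def record_digest(path: Path) -> str:
    # RECORD stores sha256 digests as unpadded urlsafe base64 (wheel spec).
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


target = Path("vellum/client.py")  # hypothetical path inside site-packages
if target.exists():
    print(f"{target},{record_digest(target)},{target.stat().st_size}")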