label-studio-sdk 1.0.5__py3-none-any.whl → 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- label_studio_sdk/__init__.py +70 -0
- label_studio_sdk/_extensions/eval/categorical.py +83 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/io.py +35 -17
- label_studio_sdk/annotations/__init__.py +3 -0
- label_studio_sdk/annotations/client.py +109 -0
- label_studio_sdk/annotations/types/__init__.py +5 -0
- label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +29 -0
- label_studio_sdk/base_client.py +9 -0
- label_studio_sdk/comments/__init__.py +2 -0
- label_studio_sdk/comments/client.py +512 -0
- label_studio_sdk/converter/converter.py +2 -0
- label_studio_sdk/converter/imports/coco.py +14 -13
- label_studio_sdk/converter/utils.py +72 -3
- label_studio_sdk/core/client_wrapper.py +1 -1
- label_studio_sdk/files/client.py +26 -16
- label_studio_sdk/label_interface/interface.py +38 -5
- label_studio_sdk/model_providers/__init__.py +2 -0
- label_studio_sdk/model_providers/client.py +190 -0
- label_studio_sdk/projects/client.py +32 -16
- label_studio_sdk/projects/exports/client.py +133 -40
- label_studio_sdk/prompts/__init__.py +21 -0
- label_studio_sdk/prompts/client.py +862 -0
- label_studio_sdk/prompts/indicators/__init__.py +2 -0
- label_studio_sdk/prompts/indicators/client.py +194 -0
- label_studio_sdk/prompts/runs/__init__.py +5 -0
- label_studio_sdk/prompts/runs/client.py +354 -0
- label_studio_sdk/prompts/runs/types/__init__.py +5 -0
- label_studio_sdk/prompts/runs/types/runs_list_request_project_subset.py +5 -0
- label_studio_sdk/prompts/types/__init__.py +15 -0
- label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +42 -0
- label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +29 -0
- label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +62 -0
- label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +29 -0
- label_studio_sdk/prompts/versions/__init__.py +2 -0
- label_studio_sdk/prompts/versions/client.py +921 -0
- label_studio_sdk/types/__init__.py +52 -0
- label_studio_sdk/types/comment.py +39 -0
- label_studio_sdk/types/comment_created_by.py +5 -0
- label_studio_sdk/types/inference_run.py +43 -0
- label_studio_sdk/types/inference_run_created_by.py +5 -0
- label_studio_sdk/types/inference_run_organization.py +5 -0
- label_studio_sdk/types/inference_run_project_subset.py +5 -0
- label_studio_sdk/types/inference_run_status.py +7 -0
- label_studio_sdk/types/key_indicator_value.py +30 -0
- label_studio_sdk/types/key_indicators.py +7 -0
- label_studio_sdk/types/key_indicators_item.py +51 -0
- label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +37 -0
- label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +37 -0
- label_studio_sdk/types/model_provider_connection.py +41 -0
- label_studio_sdk/types/model_provider_connection_created_by.py +5 -0
- label_studio_sdk/types/model_provider_connection_organization.py +5 -0
- label_studio_sdk/types/model_provider_connection_provider.py +5 -0
- label_studio_sdk/types/model_provider_connection_scope.py +5 -0
- label_studio_sdk/types/prompt.py +79 -0
- label_studio_sdk/types/prompt_created_by.py +5 -0
- label_studio_sdk/types/prompt_organization.py +5 -0
- label_studio_sdk/types/prompt_version.py +41 -0
- label_studio_sdk/types/prompt_version_created_by.py +5 -0
- label_studio_sdk/types/prompt_version_organization.py +5 -0
- label_studio_sdk/types/prompt_version_provider.py +5 -0
- label_studio_sdk/types/refined_prompt_response.py +64 -0
- label_studio_sdk/types/refined_prompt_response_refinement_status.py +7 -0
- label_studio_sdk/webhooks/client.py +245 -36
- label_studio_sdk/workspaces/client.py +20 -20
- label_studio_sdk-1.0.7.dist-info/LICENSE +201 -0
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/METADATA +17 -3
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/RECORD +68 -19
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/WHEEL +1 -1
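
The largest single addition is the new prompt versions client, `label_studio_sdk/prompts/versions/client.py` (the 921-line hunk reproduced below, matching the +921 entry above). As a quick orientation, here is a minimal usage sketch stitched together from the docstring examples in that file; it is not part of the package diff itself, and the title, prompt text, teacher model name, connection ID, and project ID are placeholders.

```python
from label_studio_sdk.client import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# List and create versions of prompt 1.
versions = client.prompts.versions.list(id=1)
new_version = client.prompts.versions.create(
    id=1,
    title="v2",  # placeholder title
    prompt="Classify the sentiment of {text}.",  # placeholder prompt text
)

# Start a refinement job against a teacher model (IDs and model name are placeholders),
# then fetch the refined prompt by job ID. The job ID comes from the POST .../refine
# response; its exact field name is not shown in this diff, so a literal is used here.
job = client.prompts.versions.refine_prompt(
    prompt_id=1,
    version_id=1,
    async_=True,
    teacher_model_provider_connection_id=1,
    teacher_model_name="gpt-4o",
    project_id=1,
)
refined = client.prompts.versions.get_refined_prompt(
    prompt_id=1,
    version_id=1,
    refinement_job_id="refinement_job_id",  # placeholder, as in the docstring example
)
```

An `AsyncVersionsClient` with the same methods is also added; per its docstrings it is reached through `AsyncLabelStudio` and awaited.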
@@ -0,0 +1,921 @@
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing
from json.decoder import JSONDecodeError

from ...core.api_error import ApiError
from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ...core.jsonable_encoder import jsonable_encoder
from ...core.pydantic_utilities import pydantic_v1
from ...core.request_options import RequestOptions
from ...types.prompt_version import PromptVersion
from ...types.prompt_version_created_by import PromptVersionCreatedBy
from ...types.prompt_version_organization import PromptVersionOrganization
from ...types.prompt_version_provider import PromptVersionProvider
from ...types.refined_prompt_response import RefinedPromptResponse

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)


class VersionsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[PromptVersion]:
        """
        Get a list of prompt versions.

        Parameters
        ----------
        id : int
            Prompt ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.List[PromptVersion]


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.list(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def create(
        self,
        id: int,
        *,
        title: typing.Optional[str] = OMIT,
        parent_model: typing.Optional[int] = OMIT,
        model_provider_connection: typing.Optional[int] = OMIT,
        prompt: typing.Optional[str] = OMIT,
        provider: typing.Optional[PromptVersionProvider] = OMIT,
        provider_model_id: typing.Optional[str] = OMIT,
        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
        created_at: typing.Optional[dt.datetime] = OMIT,
        updated_at: typing.Optional[dt.datetime] = OMIT,
        organization: typing.Optional[PromptVersionOrganization] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> PromptVersion:
        """
        Create a new version of a prompt.

        Parameters
        ----------
        id : int
            Prompt ID

        title : typing.Optional[str]

        parent_model : typing.Optional[int]

        model_provider_connection : typing.Optional[int]

        prompt : typing.Optional[str]

        provider : typing.Optional[PromptVersionProvider]

        provider_model_id : typing.Optional[str]

        created_by : typing.Optional[PromptVersionCreatedBy]

        created_at : typing.Optional[dt.datetime]

        updated_at : typing.Optional[dt.datetime]

        organization : typing.Optional[PromptVersionOrganization]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.create(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions",
            method="POST",
            json={
                "title": title,
                "parent_model": parent_model,
                "model_provider_connection": model_provider_connection,
                "prompt": prompt,
                "provider": provider,
                "provider_model_id": provider_model_id,
                "created_by": created_by,
                "created_at": created_at,
                "updated_at": updated_at,
                "organization": organization,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def get(
        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> PromptVersion:
        """
        Get a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.get(
            id=1,
            version_id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def delete(self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
        """
        Delete a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        None

        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.delete(
            id=1,
            version_id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def update(
        self,
        id: int,
        version_id: int,
        *,
        title: typing.Optional[str] = OMIT,
        parent_model: typing.Optional[int] = OMIT,
        model_provider_connection: typing.Optional[int] = OMIT,
        prompt: typing.Optional[str] = OMIT,
        provider: typing.Optional[PromptVersionProvider] = OMIT,
        provider_model_id: typing.Optional[str] = OMIT,
        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
        created_at: typing.Optional[dt.datetime] = OMIT,
        updated_at: typing.Optional[dt.datetime] = OMIT,
        organization: typing.Optional[PromptVersionOrganization] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> PromptVersion:
        """
        Update a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        title : typing.Optional[str]

        parent_model : typing.Optional[int]

        model_provider_connection : typing.Optional[int]

        prompt : typing.Optional[str]

        provider : typing.Optional[PromptVersionProvider]

        provider_model_id : typing.Optional[str]

        created_by : typing.Optional[PromptVersionCreatedBy]

        created_at : typing.Optional[dt.datetime]

        updated_at : typing.Optional[dt.datetime]

        organization : typing.Optional[PromptVersionOrganization]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.update(
            id=1,
            version_id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="PATCH",
            json={
                "title": title,
                "parent_model": parent_model,
                "model_provider_connection": model_provider_connection,
                "prompt": prompt,
                "provider": provider,
                "provider_model_id": provider_model_id,
                "created_by": created_by,
                "created_at": created_at,
                "updated_at": updated_at,
                "organization": organization,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def get_refined_prompt(
        self,
        prompt_id: int,
        version_id: int,
        *,
        refinement_job_id: str,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> RefinedPromptResponse:
        """
        Get the refined prompt based on the `refinement_job_id`.

        Parameters
        ----------
        prompt_id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        refinement_job_id : str
            Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        RefinedPromptResponse


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.get_refined_prompt(
            prompt_id=1,
            version_id=1,
            refinement_job_id="refinement_job_id",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
            method="GET",
            params={"refinement_job_id": refinement_job_id},
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def refine_prompt(
        self,
        prompt_id: int,
        version_id: int,
        *,
        async_: typing.Optional[bool] = None,
        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
        teacher_model_name: typing.Optional[str] = OMIT,
        project_id: typing.Optional[int] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> RefinedPromptResponse:
        """
        Refine a prompt version using a teacher model and save the refined prompt as a new version.

        Parameters
        ----------
        prompt_id : int
            Prompt ID

        version_id : int
            Base Prompt Version ID

        async_ : typing.Optional[bool]
            Run the refinement job asynchronously

        teacher_model_provider_connection_id : typing.Optional[int]
            Model Provider Connection ID to use to refine the prompt

        teacher_model_name : typing.Optional[str]
            Name of the model to use to refine the prompt

        project_id : typing.Optional[int]
            Project ID to target the refined prompt for

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        RefinedPromptResponse


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.prompts.versions.refine_prompt(
            prompt_id=1,
            version_id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
            method="POST",
            params={"async": async_},
            json={
                "teacher_model_provider_connection_id": teacher_model_provider_connection_id,
                "teacher_model_name": teacher_model_name,
                "project_id": project_id,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)


class AsyncVersionsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def list(
        self, id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.List[PromptVersion]:
        """
        Get a list of prompt versions.

        Parameters
        ----------
        id : int
            Prompt ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.List[PromptVersion]


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.list(
            id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def create(
        self,
        id: int,
        *,
        title: typing.Optional[str] = OMIT,
        parent_model: typing.Optional[int] = OMIT,
        model_provider_connection: typing.Optional[int] = OMIT,
        prompt: typing.Optional[str] = OMIT,
        provider: typing.Optional[PromptVersionProvider] = OMIT,
        provider_model_id: typing.Optional[str] = OMIT,
        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
        created_at: typing.Optional[dt.datetime] = OMIT,
        updated_at: typing.Optional[dt.datetime] = OMIT,
        organization: typing.Optional[PromptVersionOrganization] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> PromptVersion:
        """
        Create a new version of a prompt.

        Parameters
        ----------
        id : int
            Prompt ID

        title : typing.Optional[str]

        parent_model : typing.Optional[int]

        model_provider_connection : typing.Optional[int]

        prompt : typing.Optional[str]

        provider : typing.Optional[PromptVersionProvider]

        provider_model_id : typing.Optional[str]

        created_by : typing.Optional[PromptVersionCreatedBy]

        created_at : typing.Optional[dt.datetime]

        updated_at : typing.Optional[dt.datetime]

        organization : typing.Optional[PromptVersionOrganization]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.create(
            id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions",
            method="POST",
            json={
                "title": title,
                "parent_model": parent_model,
                "model_provider_connection": model_provider_connection,
                "prompt": prompt,
                "provider": provider,
                "provider_model_id": provider_model_id,
                "created_by": created_by,
                "created_at": created_at,
                "updated_at": updated_at,
                "organization": organization,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get(
        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> PromptVersion:
        """
        Get a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.get(
            id=1,
            version_id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def delete(
        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> None:
        """
        Delete a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        None

        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.delete(
            id=1,
            version_id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def update(
        self,
        id: int,
        version_id: int,
        *,
        title: typing.Optional[str] = OMIT,
        parent_model: typing.Optional[int] = OMIT,
        model_provider_connection: typing.Optional[int] = OMIT,
        prompt: typing.Optional[str] = OMIT,
        provider: typing.Optional[PromptVersionProvider] = OMIT,
        provider_model_id: typing.Optional[str] = OMIT,
        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
        created_at: typing.Optional[dt.datetime] = OMIT,
        updated_at: typing.Optional[dt.datetime] = OMIT,
        organization: typing.Optional[PromptVersionOrganization] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> PromptVersion:
        """
        Update a prompt version by ID.

        Parameters
        ----------
        id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        title : typing.Optional[str]

        parent_model : typing.Optional[int]

        model_provider_connection : typing.Optional[int]

        prompt : typing.Optional[str]

        provider : typing.Optional[PromptVersionProvider]

        provider_model_id : typing.Optional[str]

        created_by : typing.Optional[PromptVersionCreatedBy]

        created_at : typing.Optional[dt.datetime]

        updated_at : typing.Optional[dt.datetime]

        organization : typing.Optional[PromptVersionOrganization]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        PromptVersion


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.update(
            id=1,
            version_id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
            method="PATCH",
            json={
                "title": title,
                "parent_model": parent_model,
                "model_provider_connection": model_provider_connection,
                "prompt": prompt,
                "provider": provider,
                "provider_model_id": provider_model_id,
                "created_by": created_by,
                "created_at": created_at,
                "updated_at": updated_at,
                "organization": organization,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get_refined_prompt(
        self,
        prompt_id: int,
        version_id: int,
        *,
        refinement_job_id: str,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> RefinedPromptResponse:
        """
        Get the refined prompt based on the `refinement_job_id`.

        Parameters
        ----------
        prompt_id : int
            Prompt ID

        version_id : int
            Prompt Version ID

        refinement_job_id : str
            Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        RefinedPromptResponse


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.get_refined_prompt(
            prompt_id=1,
            version_id=1,
            refinement_job_id="refinement_job_id",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
            method="GET",
            params={"refinement_job_id": refinement_job_id},
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def refine_prompt(
        self,
        prompt_id: int,
        version_id: int,
        *,
        async_: typing.Optional[bool] = None,
        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
        teacher_model_name: typing.Optional[str] = OMIT,
        project_id: typing.Optional[int] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> RefinedPromptResponse:
        """
        Refine a prompt version using a teacher model and save the refined prompt as a new version.

        Parameters
        ----------
        prompt_id : int
            Prompt ID

        version_id : int
            Base Prompt Version ID

        async_ : typing.Optional[bool]
            Run the refinement job asynchronously

        teacher_model_provider_connection_id : typing.Optional[int]
            Model Provider Connection ID to use to refine the prompt

        teacher_model_name : typing.Optional[str]
            Name of the model to use to refine the prompt

        project_id : typing.Optional[int]
            Project ID to target the refined prompt for

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        RefinedPromptResponse


        Examples
        --------
        from label_studio_sdk.client import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )
        await client.prompts.versions.refine_prompt(
            prompt_id=1,
            version_id=1,
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
            method="POST",
            params={"async": async_},
            json={
                "teacher_model_provider_connection_id": teacher_model_provider_connection_id,
                "teacher_model_name": teacher_model_name,
                "project_id": project_id,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)