label-studio-sdk 1.0.5__py3-none-any.whl → 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- label_studio_sdk/__init__.py +70 -0
- label_studio_sdk/_extensions/eval/categorical.py +83 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/io.py +35 -17
- label_studio_sdk/annotations/__init__.py +3 -0
- label_studio_sdk/annotations/client.py +109 -0
- label_studio_sdk/annotations/types/__init__.py +5 -0
- label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +29 -0
- label_studio_sdk/base_client.py +9 -0
- label_studio_sdk/comments/__init__.py +2 -0
- label_studio_sdk/comments/client.py +512 -0
- label_studio_sdk/converter/converter.py +2 -0
- label_studio_sdk/converter/imports/coco.py +14 -13
- label_studio_sdk/converter/utils.py +72 -3
- label_studio_sdk/core/client_wrapper.py +1 -1
- label_studio_sdk/files/client.py +26 -16
- label_studio_sdk/label_interface/interface.py +38 -5
- label_studio_sdk/model_providers/__init__.py +2 -0
- label_studio_sdk/model_providers/client.py +190 -0
- label_studio_sdk/projects/client.py +32 -16
- label_studio_sdk/projects/exports/client.py +133 -40
- label_studio_sdk/prompts/__init__.py +21 -0
- label_studio_sdk/prompts/client.py +862 -0
- label_studio_sdk/prompts/indicators/__init__.py +2 -0
- label_studio_sdk/prompts/indicators/client.py +194 -0
- label_studio_sdk/prompts/runs/__init__.py +5 -0
- label_studio_sdk/prompts/runs/client.py +354 -0
- label_studio_sdk/prompts/runs/types/__init__.py +5 -0
- label_studio_sdk/prompts/runs/types/runs_list_request_project_subset.py +5 -0
- label_studio_sdk/prompts/types/__init__.py +15 -0
- label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +42 -0
- label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +29 -0
- label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +62 -0
- label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +29 -0
- label_studio_sdk/prompts/versions/__init__.py +2 -0
- label_studio_sdk/prompts/versions/client.py +921 -0
- label_studio_sdk/types/__init__.py +52 -0
- label_studio_sdk/types/comment.py +39 -0
- label_studio_sdk/types/comment_created_by.py +5 -0
- label_studio_sdk/types/inference_run.py +43 -0
- label_studio_sdk/types/inference_run_created_by.py +5 -0
- label_studio_sdk/types/inference_run_organization.py +5 -0
- label_studio_sdk/types/inference_run_project_subset.py +5 -0
- label_studio_sdk/types/inference_run_status.py +7 -0
- label_studio_sdk/types/key_indicator_value.py +30 -0
- label_studio_sdk/types/key_indicators.py +7 -0
- label_studio_sdk/types/key_indicators_item.py +51 -0
- label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +37 -0
- label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +37 -0
- label_studio_sdk/types/model_provider_connection.py +41 -0
- label_studio_sdk/types/model_provider_connection_created_by.py +5 -0
- label_studio_sdk/types/model_provider_connection_organization.py +5 -0
- label_studio_sdk/types/model_provider_connection_provider.py +5 -0
- label_studio_sdk/types/model_provider_connection_scope.py +5 -0
- label_studio_sdk/types/prompt.py +79 -0
- label_studio_sdk/types/prompt_created_by.py +5 -0
- label_studio_sdk/types/prompt_organization.py +5 -0
- label_studio_sdk/types/prompt_version.py +41 -0
- label_studio_sdk/types/prompt_version_created_by.py +5 -0
- label_studio_sdk/types/prompt_version_organization.py +5 -0
- label_studio_sdk/types/prompt_version_provider.py +5 -0
- label_studio_sdk/types/refined_prompt_response.py +64 -0
- label_studio_sdk/types/refined_prompt_response_refinement_status.py +7 -0
- label_studio_sdk/webhooks/client.py +245 -36
- label_studio_sdk/workspaces/client.py +20 -20
- label_studio_sdk-1.0.7.dist-info/LICENSE +201 -0
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/METADATA +17 -3
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/RECORD +68 -19
- {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.7.dist-info}/WHEEL +1 -1
|
@@ -0,0 +1,862 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
import datetime as dt
|
|
4
|
+
import typing
|
|
5
|
+
from json.decoder import JSONDecodeError
|
|
6
|
+
|
|
7
|
+
from ..core.api_error import ApiError
|
|
8
|
+
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
|
|
9
|
+
from ..core.jsonable_encoder import jsonable_encoder
|
|
10
|
+
from ..core.pydantic_utilities import pydantic_v1
|
|
11
|
+
from ..core.request_options import RequestOptions
|
|
12
|
+
from ..types.prompt import Prompt
|
|
13
|
+
from ..types.prompt_created_by import PromptCreatedBy
|
|
14
|
+
from ..types.prompt_organization import PromptOrganization
|
|
15
|
+
from .indicators.client import AsyncIndicatorsClient, IndicatorsClient
|
|
16
|
+
from .runs.client import AsyncRunsClient, RunsClient
|
|
17
|
+
from .types.prompts_batch_failed_predictions_request_failed_predictions_item import (
|
|
18
|
+
PromptsBatchFailedPredictionsRequestFailedPredictionsItem,
|
|
19
|
+
)
|
|
20
|
+
from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse
|
|
21
|
+
from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem
|
|
22
|
+
from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse
|
|
23
|
+
from .versions.client import AsyncVersionsClient, VersionsClient
|
|
24
|
+
|
|
25
|
+
# this is used as the default value for optional parameters
|
|
26
|
+
OMIT = typing.cast(typing.Any, ...)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class PromptsClient:
|
|
30
|
+
def __init__(self, *, client_wrapper: SyncClientWrapper):
|
|
31
|
+
self._client_wrapper = client_wrapper
|
|
32
|
+
self.versions = VersionsClient(client_wrapper=self._client_wrapper)
|
|
33
|
+
self.runs = RunsClient(client_wrapper=self._client_wrapper)
|
|
34
|
+
self.indicators = IndicatorsClient(client_wrapper=self._client_wrapper)
|
|
35
|
+
|
|
36
|
+
def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]:
|
|
37
|
+
"""
|
|
38
|
+
Get a list of prompts.
|
|
39
|
+
|
|
40
|
+
Parameters
|
|
41
|
+
----------
|
|
42
|
+
request_options : typing.Optional[RequestOptions]
|
|
43
|
+
Request-specific configuration.
|
|
44
|
+
|
|
45
|
+
Returns
|
|
46
|
+
-------
|
|
47
|
+
typing.List[Prompt]
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
Examples
|
|
51
|
+
--------
|
|
52
|
+
from label_studio_sdk.client import LabelStudio
|
|
53
|
+
|
|
54
|
+
client = LabelStudio(
|
|
55
|
+
api_key="YOUR_API_KEY",
|
|
56
|
+
)
|
|
57
|
+
client.prompts.list()
|
|
58
|
+
"""
|
|
59
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
60
|
+
"api/prompts/", method="GET", request_options=request_options
|
|
61
|
+
)
|
|
62
|
+
try:
|
|
63
|
+
if 200 <= _response.status_code < 300:
|
|
64
|
+
return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore
|
|
65
|
+
_response_json = _response.json()
|
|
66
|
+
except JSONDecodeError:
|
|
67
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
68
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
69
|
+
|
|
70
|
+
def create(
|
|
71
|
+
self,
|
|
72
|
+
*,
|
|
73
|
+
title: str,
|
|
74
|
+
input_fields: typing.Sequence[str],
|
|
75
|
+
output_classes: typing.Sequence[str],
|
|
76
|
+
description: typing.Optional[str] = OMIT,
|
|
77
|
+
created_by: typing.Optional[PromptCreatedBy] = OMIT,
|
|
78
|
+
created_at: typing.Optional[dt.datetime] = OMIT,
|
|
79
|
+
updated_at: typing.Optional[dt.datetime] = OMIT,
|
|
80
|
+
organization: typing.Optional[PromptOrganization] = OMIT,
|
|
81
|
+
associated_projects: typing.Optional[typing.Sequence[int]] = OMIT,
|
|
82
|
+
skill_name: typing.Optional[str] = OMIT,
|
|
83
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
84
|
+
) -> Prompt:
|
|
85
|
+
"""
|
|
86
|
+
Create a new prompt.
|
|
87
|
+
|
|
88
|
+
Parameters
|
|
89
|
+
----------
|
|
90
|
+
title : str
|
|
91
|
+
Title of the prompt
|
|
92
|
+
|
|
93
|
+
input_fields : typing.Sequence[str]
|
|
94
|
+
List of input fields
|
|
95
|
+
|
|
96
|
+
output_classes : typing.Sequence[str]
|
|
97
|
+
List of output classes
|
|
98
|
+
|
|
99
|
+
description : typing.Optional[str]
|
|
100
|
+
Description of the prompt
|
|
101
|
+
|
|
102
|
+
created_by : typing.Optional[PromptCreatedBy]
|
|
103
|
+
User ID of the creator of the prompt
|
|
104
|
+
|
|
105
|
+
created_at : typing.Optional[dt.datetime]
|
|
106
|
+
Date and time the prompt was created
|
|
107
|
+
|
|
108
|
+
updated_at : typing.Optional[dt.datetime]
|
|
109
|
+
Date and time the prompt was last updated
|
|
110
|
+
|
|
111
|
+
organization : typing.Optional[PromptOrganization]
|
|
112
|
+
Organization ID of the prompt
|
|
113
|
+
|
|
114
|
+
associated_projects : typing.Optional[typing.Sequence[int]]
|
|
115
|
+
List of associated projects IDs
|
|
116
|
+
|
|
117
|
+
skill_name : typing.Optional[str]
|
|
118
|
+
Name of the skill
|
|
119
|
+
|
|
120
|
+
request_options : typing.Optional[RequestOptions]
|
|
121
|
+
Request-specific configuration.
|
|
122
|
+
|
|
123
|
+
Returns
|
|
124
|
+
-------
|
|
125
|
+
Prompt
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
Examples
|
|
129
|
+
--------
|
|
130
|
+
from label_studio_sdk.client import LabelStudio
|
|
131
|
+
|
|
132
|
+
client = LabelStudio(
|
|
133
|
+
api_key="YOUR_API_KEY",
|
|
134
|
+
)
|
|
135
|
+
client.prompts.create(
|
|
136
|
+
title="title",
|
|
137
|
+
input_fields=["input_fields"],
|
|
138
|
+
output_classes=["output_classes"],
|
|
139
|
+
)
|
|
140
|
+
"""
|
|
141
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
142
|
+
"api/prompts/",
|
|
143
|
+
method="POST",
|
|
144
|
+
json={
|
|
145
|
+
"title": title,
|
|
146
|
+
"description": description,
|
|
147
|
+
"created_by": created_by,
|
|
148
|
+
"created_at": created_at,
|
|
149
|
+
"updated_at": updated_at,
|
|
150
|
+
"organization": organization,
|
|
151
|
+
"input_fields": input_fields,
|
|
152
|
+
"output_classes": output_classes,
|
|
153
|
+
"associated_projects": associated_projects,
|
|
154
|
+
"skill_name": skill_name,
|
|
155
|
+
},
|
|
156
|
+
request_options=request_options,
|
|
157
|
+
omit=OMIT,
|
|
158
|
+
)
|
|
159
|
+
try:
|
|
160
|
+
if 200 <= _response.status_code < 300:
|
|
161
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
162
|
+
_response_json = _response.json()
|
|
163
|
+
except JSONDecodeError:
|
|
164
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
165
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
166
|
+
|
|
167
|
+
def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt:
|
|
168
|
+
"""
|
|
169
|
+
Get a prompt by ID.
|
|
170
|
+
|
|
171
|
+
Parameters
|
|
172
|
+
----------
|
|
173
|
+
id : int
|
|
174
|
+
Prompt ID
|
|
175
|
+
|
|
176
|
+
request_options : typing.Optional[RequestOptions]
|
|
177
|
+
Request-specific configuration.
|
|
178
|
+
|
|
179
|
+
Returns
|
|
180
|
+
-------
|
|
181
|
+
Prompt
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
Examples
|
|
185
|
+
--------
|
|
186
|
+
from label_studio_sdk.client import LabelStudio
|
|
187
|
+
|
|
188
|
+
client = LabelStudio(
|
|
189
|
+
api_key="YOUR_API_KEY",
|
|
190
|
+
)
|
|
191
|
+
client.prompts.get(
|
|
192
|
+
id=1,
|
|
193
|
+
)
|
|
194
|
+
"""
|
|
195
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
196
|
+
f"api/prompts/{jsonable_encoder(id)}", method="GET", request_options=request_options
|
|
197
|
+
)
|
|
198
|
+
try:
|
|
199
|
+
if 200 <= _response.status_code < 300:
|
|
200
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
201
|
+
_response_json = _response.json()
|
|
202
|
+
except JSONDecodeError:
|
|
203
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
204
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
205
|
+
|
|
206
|
+
def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
|
|
207
|
+
"""
|
|
208
|
+
Delete a prompt by ID.
|
|
209
|
+
|
|
210
|
+
Parameters
|
|
211
|
+
----------
|
|
212
|
+
id : int
|
|
213
|
+
Prompt ID
|
|
214
|
+
|
|
215
|
+
request_options : typing.Optional[RequestOptions]
|
|
216
|
+
Request-specific configuration.
|
|
217
|
+
|
|
218
|
+
Returns
|
|
219
|
+
-------
|
|
220
|
+
None
|
|
221
|
+
|
|
222
|
+
Examples
|
|
223
|
+
--------
|
|
224
|
+
from label_studio_sdk.client import LabelStudio
|
|
225
|
+
|
|
226
|
+
client = LabelStudio(
|
|
227
|
+
api_key="YOUR_API_KEY",
|
|
228
|
+
)
|
|
229
|
+
client.prompts.delete(
|
|
230
|
+
id=1,
|
|
231
|
+
)
|
|
232
|
+
"""
|
|
233
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
234
|
+
f"api/prompts/{jsonable_encoder(id)}", method="DELETE", request_options=request_options
|
|
235
|
+
)
|
|
236
|
+
try:
|
|
237
|
+
if 200 <= _response.status_code < 300:
|
|
238
|
+
return
|
|
239
|
+
_response_json = _response.json()
|
|
240
|
+
except JSONDecodeError:
|
|
241
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
242
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
243
|
+
|
|
244
|
+
def update(
|
|
245
|
+
self,
|
|
246
|
+
id: int,
|
|
247
|
+
*,
|
|
248
|
+
title: str,
|
|
249
|
+
input_fields: typing.Sequence[str],
|
|
250
|
+
output_classes: typing.Sequence[str],
|
|
251
|
+
description: typing.Optional[str] = OMIT,
|
|
252
|
+
created_by: typing.Optional[PromptCreatedBy] = OMIT,
|
|
253
|
+
created_at: typing.Optional[dt.datetime] = OMIT,
|
|
254
|
+
updated_at: typing.Optional[dt.datetime] = OMIT,
|
|
255
|
+
organization: typing.Optional[PromptOrganization] = OMIT,
|
|
256
|
+
associated_projects: typing.Optional[typing.Sequence[int]] = OMIT,
|
|
257
|
+
skill_name: typing.Optional[str] = OMIT,
|
|
258
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
259
|
+
) -> Prompt:
|
|
260
|
+
"""
|
|
261
|
+
Update a prompt by ID.
|
|
262
|
+
|
|
263
|
+
Parameters
|
|
264
|
+
----------
|
|
265
|
+
id : int
|
|
266
|
+
Prompt ID
|
|
267
|
+
|
|
268
|
+
title : str
|
|
269
|
+
Title of the prompt
|
|
270
|
+
|
|
271
|
+
input_fields : typing.Sequence[str]
|
|
272
|
+
List of input fields
|
|
273
|
+
|
|
274
|
+
output_classes : typing.Sequence[str]
|
|
275
|
+
List of output classes
|
|
276
|
+
|
|
277
|
+
description : typing.Optional[str]
|
|
278
|
+
Description of the prompt
|
|
279
|
+
|
|
280
|
+
created_by : typing.Optional[PromptCreatedBy]
|
|
281
|
+
User ID of the creator of the prompt
|
|
282
|
+
|
|
283
|
+
created_at : typing.Optional[dt.datetime]
|
|
284
|
+
Date and time the prompt was created
|
|
285
|
+
|
|
286
|
+
updated_at : typing.Optional[dt.datetime]
|
|
287
|
+
Date and time the prompt was last updated
|
|
288
|
+
|
|
289
|
+
organization : typing.Optional[PromptOrganization]
|
|
290
|
+
Organization ID of the prompt
|
|
291
|
+
|
|
292
|
+
associated_projects : typing.Optional[typing.Sequence[int]]
|
|
293
|
+
List of associated projects IDs
|
|
294
|
+
|
|
295
|
+
skill_name : typing.Optional[str]
|
|
296
|
+
Name of the skill
|
|
297
|
+
|
|
298
|
+
request_options : typing.Optional[RequestOptions]
|
|
299
|
+
Request-specific configuration.
|
|
300
|
+
|
|
301
|
+
Returns
|
|
302
|
+
-------
|
|
303
|
+
Prompt
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
Examples
|
|
307
|
+
--------
|
|
308
|
+
from label_studio_sdk.client import LabelStudio
|
|
309
|
+
|
|
310
|
+
client = LabelStudio(
|
|
311
|
+
api_key="YOUR_API_KEY",
|
|
312
|
+
)
|
|
313
|
+
client.prompts.update(
|
|
314
|
+
id=1,
|
|
315
|
+
title="title",
|
|
316
|
+
input_fields=["input_fields"],
|
|
317
|
+
output_classes=["output_classes"],
|
|
318
|
+
)
|
|
319
|
+
"""
|
|
320
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
321
|
+
f"api/prompts/{jsonable_encoder(id)}",
|
|
322
|
+
method="PATCH",
|
|
323
|
+
json={
|
|
324
|
+
"title": title,
|
|
325
|
+
"description": description,
|
|
326
|
+
"created_by": created_by,
|
|
327
|
+
"created_at": created_at,
|
|
328
|
+
"updated_at": updated_at,
|
|
329
|
+
"organization": organization,
|
|
330
|
+
"input_fields": input_fields,
|
|
331
|
+
"output_classes": output_classes,
|
|
332
|
+
"associated_projects": associated_projects,
|
|
333
|
+
"skill_name": skill_name,
|
|
334
|
+
},
|
|
335
|
+
request_options=request_options,
|
|
336
|
+
omit=OMIT,
|
|
337
|
+
)
|
|
338
|
+
try:
|
|
339
|
+
if 200 <= _response.status_code < 300:
|
|
340
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
341
|
+
_response_json = _response.json()
|
|
342
|
+
except JSONDecodeError:
|
|
343
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
344
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
345
|
+
|
|
346
|
+
def batch_predictions(
|
|
347
|
+
self,
|
|
348
|
+
*,
|
|
349
|
+
modelrun_id: typing.Optional[int] = OMIT,
|
|
350
|
+
results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT,
|
|
351
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
352
|
+
) -> PromptsBatchPredictionsResponse:
|
|
353
|
+
"""
|
|
354
|
+
Create a new batch prediction.
|
|
355
|
+
|
|
356
|
+
Parameters
|
|
357
|
+
----------
|
|
358
|
+
modelrun_id : typing.Optional[int]
|
|
359
|
+
Model Run ID to associate the prediction with
|
|
360
|
+
|
|
361
|
+
results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]]
|
|
362
|
+
|
|
363
|
+
request_options : typing.Optional[RequestOptions]
|
|
364
|
+
Request-specific configuration.
|
|
365
|
+
|
|
366
|
+
Returns
|
|
367
|
+
-------
|
|
368
|
+
PromptsBatchPredictionsResponse
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
Examples
|
|
372
|
+
--------
|
|
373
|
+
from label_studio_sdk.client import LabelStudio
|
|
374
|
+
|
|
375
|
+
client = LabelStudio(
|
|
376
|
+
api_key="YOUR_API_KEY",
|
|
377
|
+
)
|
|
378
|
+
client.prompts.batch_predictions()
|
|
379
|
+
"""
|
|
380
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
381
|
+
"api/model-run/batch-predictions",
|
|
382
|
+
method="POST",
|
|
383
|
+
json={"modelrun_id": modelrun_id, "results": results},
|
|
384
|
+
request_options=request_options,
|
|
385
|
+
omit=OMIT,
|
|
386
|
+
)
|
|
387
|
+
try:
|
|
388
|
+
if 200 <= _response.status_code < 300:
|
|
389
|
+
return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, _response.json()) # type: ignore
|
|
390
|
+
_response_json = _response.json()
|
|
391
|
+
except JSONDecodeError:
|
|
392
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
393
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
394
|
+
|
|
395
|
+
def batch_failed_predictions(
|
|
396
|
+
self,
|
|
397
|
+
*,
|
|
398
|
+
modelrun_id: typing.Optional[int] = OMIT,
|
|
399
|
+
failed_predictions: typing.Optional[
|
|
400
|
+
typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]
|
|
401
|
+
] = OMIT,
|
|
402
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
403
|
+
) -> PromptsBatchFailedPredictionsResponse:
|
|
404
|
+
"""
|
|
405
|
+
Create a new batch of failed predictions.
|
|
406
|
+
|
|
407
|
+
Parameters
|
|
408
|
+
----------
|
|
409
|
+
modelrun_id : typing.Optional[int]
|
|
410
|
+
Model Run ID where the failed predictions came from
|
|
411
|
+
|
|
412
|
+
failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]]
|
|
413
|
+
|
|
414
|
+
request_options : typing.Optional[RequestOptions]
|
|
415
|
+
Request-specific configuration.
|
|
416
|
+
|
|
417
|
+
Returns
|
|
418
|
+
-------
|
|
419
|
+
PromptsBatchFailedPredictionsResponse
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
Examples
|
|
423
|
+
--------
|
|
424
|
+
from label_studio_sdk.client import LabelStudio
|
|
425
|
+
|
|
426
|
+
client = LabelStudio(
|
|
427
|
+
api_key="YOUR_API_KEY",
|
|
428
|
+
)
|
|
429
|
+
client.prompts.batch_failed_predictions()
|
|
430
|
+
"""
|
|
431
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
432
|
+
"api/model-run/batch-failed-predictions",
|
|
433
|
+
method="POST",
|
|
434
|
+
json={"modelrun_id": modelrun_id, "failed_predictions": failed_predictions},
|
|
435
|
+
request_options=request_options,
|
|
436
|
+
omit=OMIT,
|
|
437
|
+
)
|
|
438
|
+
try:
|
|
439
|
+
if 200 <= _response.status_code < 300:
|
|
440
|
+
return pydantic_v1.parse_obj_as(PromptsBatchFailedPredictionsResponse, _response.json()) # type: ignore
|
|
441
|
+
_response_json = _response.json()
|
|
442
|
+
except JSONDecodeError:
|
|
443
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
444
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
class AsyncPromptsClient:
|
|
448
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
449
|
+
self._client_wrapper = client_wrapper
|
|
450
|
+
self.versions = AsyncVersionsClient(client_wrapper=self._client_wrapper)
|
|
451
|
+
self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper)
|
|
452
|
+
self.indicators = AsyncIndicatorsClient(client_wrapper=self._client_wrapper)
|
|
453
|
+
|
|
454
|
+
async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]:
|
|
455
|
+
"""
|
|
456
|
+
Get a list of prompts.
|
|
457
|
+
|
|
458
|
+
Parameters
|
|
459
|
+
----------
|
|
460
|
+
request_options : typing.Optional[RequestOptions]
|
|
461
|
+
Request-specific configuration.
|
|
462
|
+
|
|
463
|
+
Returns
|
|
464
|
+
-------
|
|
465
|
+
typing.List[Prompt]
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
Examples
|
|
469
|
+
--------
|
|
470
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
471
|
+
|
|
472
|
+
client = AsyncLabelStudio(
|
|
473
|
+
api_key="YOUR_API_KEY",
|
|
474
|
+
)
|
|
475
|
+
await client.prompts.list()
|
|
476
|
+
"""
|
|
477
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
478
|
+
"api/prompts/", method="GET", request_options=request_options
|
|
479
|
+
)
|
|
480
|
+
try:
|
|
481
|
+
if 200 <= _response.status_code < 300:
|
|
482
|
+
return pydantic_v1.parse_obj_as(typing.List[Prompt], _response.json()) # type: ignore
|
|
483
|
+
_response_json = _response.json()
|
|
484
|
+
except JSONDecodeError:
|
|
485
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
486
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
487
|
+
|
|
488
|
+
async def create(
|
|
489
|
+
self,
|
|
490
|
+
*,
|
|
491
|
+
title: str,
|
|
492
|
+
input_fields: typing.Sequence[str],
|
|
493
|
+
output_classes: typing.Sequence[str],
|
|
494
|
+
description: typing.Optional[str] = OMIT,
|
|
495
|
+
created_by: typing.Optional[PromptCreatedBy] = OMIT,
|
|
496
|
+
created_at: typing.Optional[dt.datetime] = OMIT,
|
|
497
|
+
updated_at: typing.Optional[dt.datetime] = OMIT,
|
|
498
|
+
organization: typing.Optional[PromptOrganization] = OMIT,
|
|
499
|
+
associated_projects: typing.Optional[typing.Sequence[int]] = OMIT,
|
|
500
|
+
skill_name: typing.Optional[str] = OMIT,
|
|
501
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
502
|
+
) -> Prompt:
|
|
503
|
+
"""
|
|
504
|
+
Create a new prompt.
|
|
505
|
+
|
|
506
|
+
Parameters
|
|
507
|
+
----------
|
|
508
|
+
title : str
|
|
509
|
+
Title of the prompt
|
|
510
|
+
|
|
511
|
+
input_fields : typing.Sequence[str]
|
|
512
|
+
List of input fields
|
|
513
|
+
|
|
514
|
+
output_classes : typing.Sequence[str]
|
|
515
|
+
List of output classes
|
|
516
|
+
|
|
517
|
+
description : typing.Optional[str]
|
|
518
|
+
Description of the prompt
|
|
519
|
+
|
|
520
|
+
created_by : typing.Optional[PromptCreatedBy]
|
|
521
|
+
User ID of the creator of the prompt
|
|
522
|
+
|
|
523
|
+
created_at : typing.Optional[dt.datetime]
|
|
524
|
+
Date and time the prompt was created
|
|
525
|
+
|
|
526
|
+
updated_at : typing.Optional[dt.datetime]
|
|
527
|
+
Date and time the prompt was last updated
|
|
528
|
+
|
|
529
|
+
organization : typing.Optional[PromptOrganization]
|
|
530
|
+
Organization ID of the prompt
|
|
531
|
+
|
|
532
|
+
associated_projects : typing.Optional[typing.Sequence[int]]
|
|
533
|
+
List of associated projects IDs
|
|
534
|
+
|
|
535
|
+
skill_name : typing.Optional[str]
|
|
536
|
+
Name of the skill
|
|
537
|
+
|
|
538
|
+
request_options : typing.Optional[RequestOptions]
|
|
539
|
+
Request-specific configuration.
|
|
540
|
+
|
|
541
|
+
Returns
|
|
542
|
+
-------
|
|
543
|
+
Prompt
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
Examples
|
|
547
|
+
--------
|
|
548
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
549
|
+
|
|
550
|
+
client = AsyncLabelStudio(
|
|
551
|
+
api_key="YOUR_API_KEY",
|
|
552
|
+
)
|
|
553
|
+
await client.prompts.create(
|
|
554
|
+
title="title",
|
|
555
|
+
input_fields=["input_fields"],
|
|
556
|
+
output_classes=["output_classes"],
|
|
557
|
+
)
|
|
558
|
+
"""
|
|
559
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
560
|
+
"api/prompts/",
|
|
561
|
+
method="POST",
|
|
562
|
+
json={
|
|
563
|
+
"title": title,
|
|
564
|
+
"description": description,
|
|
565
|
+
"created_by": created_by,
|
|
566
|
+
"created_at": created_at,
|
|
567
|
+
"updated_at": updated_at,
|
|
568
|
+
"organization": organization,
|
|
569
|
+
"input_fields": input_fields,
|
|
570
|
+
"output_classes": output_classes,
|
|
571
|
+
"associated_projects": associated_projects,
|
|
572
|
+
"skill_name": skill_name,
|
|
573
|
+
},
|
|
574
|
+
request_options=request_options,
|
|
575
|
+
omit=OMIT,
|
|
576
|
+
)
|
|
577
|
+
try:
|
|
578
|
+
if 200 <= _response.status_code < 300:
|
|
579
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
580
|
+
_response_json = _response.json()
|
|
581
|
+
except JSONDecodeError:
|
|
582
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
583
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
584
|
+
|
|
585
|
+
async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt:
|
|
586
|
+
"""
|
|
587
|
+
Get a prompt by ID.
|
|
588
|
+
|
|
589
|
+
Parameters
|
|
590
|
+
----------
|
|
591
|
+
id : int
|
|
592
|
+
Prompt ID
|
|
593
|
+
|
|
594
|
+
request_options : typing.Optional[RequestOptions]
|
|
595
|
+
Request-specific configuration.
|
|
596
|
+
|
|
597
|
+
Returns
|
|
598
|
+
-------
|
|
599
|
+
Prompt
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
Examples
|
|
603
|
+
--------
|
|
604
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
605
|
+
|
|
606
|
+
client = AsyncLabelStudio(
|
|
607
|
+
api_key="YOUR_API_KEY",
|
|
608
|
+
)
|
|
609
|
+
await client.prompts.get(
|
|
610
|
+
id=1,
|
|
611
|
+
)
|
|
612
|
+
"""
|
|
613
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
614
|
+
f"api/prompts/{jsonable_encoder(id)}", method="GET", request_options=request_options
|
|
615
|
+
)
|
|
616
|
+
try:
|
|
617
|
+
if 200 <= _response.status_code < 300:
|
|
618
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
619
|
+
_response_json = _response.json()
|
|
620
|
+
except JSONDecodeError:
|
|
621
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
622
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
623
|
+
|
|
624
|
+
async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
|
|
625
|
+
"""
|
|
626
|
+
Delete a prompt by ID.
|
|
627
|
+
|
|
628
|
+
Parameters
|
|
629
|
+
----------
|
|
630
|
+
id : int
|
|
631
|
+
Prompt ID
|
|
632
|
+
|
|
633
|
+
request_options : typing.Optional[RequestOptions]
|
|
634
|
+
Request-specific configuration.
|
|
635
|
+
|
|
636
|
+
Returns
|
|
637
|
+
-------
|
|
638
|
+
None
|
|
639
|
+
|
|
640
|
+
Examples
|
|
641
|
+
--------
|
|
642
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
643
|
+
|
|
644
|
+
client = AsyncLabelStudio(
|
|
645
|
+
api_key="YOUR_API_KEY",
|
|
646
|
+
)
|
|
647
|
+
await client.prompts.delete(
|
|
648
|
+
id=1,
|
|
649
|
+
)
|
|
650
|
+
"""
|
|
651
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
652
|
+
f"api/prompts/{jsonable_encoder(id)}", method="DELETE", request_options=request_options
|
|
653
|
+
)
|
|
654
|
+
try:
|
|
655
|
+
if 200 <= _response.status_code < 300:
|
|
656
|
+
return
|
|
657
|
+
_response_json = _response.json()
|
|
658
|
+
except JSONDecodeError:
|
|
659
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
660
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
661
|
+
|
|
662
|
+
async def update(
|
|
663
|
+
self,
|
|
664
|
+
id: int,
|
|
665
|
+
*,
|
|
666
|
+
title: str,
|
|
667
|
+
input_fields: typing.Sequence[str],
|
|
668
|
+
output_classes: typing.Sequence[str],
|
|
669
|
+
description: typing.Optional[str] = OMIT,
|
|
670
|
+
created_by: typing.Optional[PromptCreatedBy] = OMIT,
|
|
671
|
+
created_at: typing.Optional[dt.datetime] = OMIT,
|
|
672
|
+
updated_at: typing.Optional[dt.datetime] = OMIT,
|
|
673
|
+
organization: typing.Optional[PromptOrganization] = OMIT,
|
|
674
|
+
associated_projects: typing.Optional[typing.Sequence[int]] = OMIT,
|
|
675
|
+
skill_name: typing.Optional[str] = OMIT,
|
|
676
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
677
|
+
) -> Prompt:
|
|
678
|
+
"""
|
|
679
|
+
Update a prompt by ID.
|
|
680
|
+
|
|
681
|
+
Parameters
|
|
682
|
+
----------
|
|
683
|
+
id : int
|
|
684
|
+
Prompt ID
|
|
685
|
+
|
|
686
|
+
title : str
|
|
687
|
+
Title of the prompt
|
|
688
|
+
|
|
689
|
+
input_fields : typing.Sequence[str]
|
|
690
|
+
List of input fields
|
|
691
|
+
|
|
692
|
+
output_classes : typing.Sequence[str]
|
|
693
|
+
List of output classes
|
|
694
|
+
|
|
695
|
+
description : typing.Optional[str]
|
|
696
|
+
Description of the prompt
|
|
697
|
+
|
|
698
|
+
created_by : typing.Optional[PromptCreatedBy]
|
|
699
|
+
User ID of the creator of the prompt
|
|
700
|
+
|
|
701
|
+
created_at : typing.Optional[dt.datetime]
|
|
702
|
+
Date and time the prompt was created
|
|
703
|
+
|
|
704
|
+
updated_at : typing.Optional[dt.datetime]
|
|
705
|
+
Date and time the prompt was last updated
|
|
706
|
+
|
|
707
|
+
organization : typing.Optional[PromptOrganization]
|
|
708
|
+
Organization ID of the prompt
|
|
709
|
+
|
|
710
|
+
associated_projects : typing.Optional[typing.Sequence[int]]
|
|
711
|
+
List of associated projects IDs
|
|
712
|
+
|
|
713
|
+
skill_name : typing.Optional[str]
|
|
714
|
+
Name of the skill
|
|
715
|
+
|
|
716
|
+
request_options : typing.Optional[RequestOptions]
|
|
717
|
+
Request-specific configuration.
|
|
718
|
+
|
|
719
|
+
Returns
|
|
720
|
+
-------
|
|
721
|
+
Prompt
|
|
722
|
+
|
|
723
|
+
|
|
724
|
+
Examples
|
|
725
|
+
--------
|
|
726
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
727
|
+
|
|
728
|
+
client = AsyncLabelStudio(
|
|
729
|
+
api_key="YOUR_API_KEY",
|
|
730
|
+
)
|
|
731
|
+
await client.prompts.update(
|
|
732
|
+
id=1,
|
|
733
|
+
title="title",
|
|
734
|
+
input_fields=["input_fields"],
|
|
735
|
+
output_classes=["output_classes"],
|
|
736
|
+
)
|
|
737
|
+
"""
|
|
738
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
739
|
+
f"api/prompts/{jsonable_encoder(id)}",
|
|
740
|
+
method="PATCH",
|
|
741
|
+
json={
|
|
742
|
+
"title": title,
|
|
743
|
+
"description": description,
|
|
744
|
+
"created_by": created_by,
|
|
745
|
+
"created_at": created_at,
|
|
746
|
+
"updated_at": updated_at,
|
|
747
|
+
"organization": organization,
|
|
748
|
+
"input_fields": input_fields,
|
|
749
|
+
"output_classes": output_classes,
|
|
750
|
+
"associated_projects": associated_projects,
|
|
751
|
+
"skill_name": skill_name,
|
|
752
|
+
},
|
|
753
|
+
request_options=request_options,
|
|
754
|
+
omit=OMIT,
|
|
755
|
+
)
|
|
756
|
+
try:
|
|
757
|
+
if 200 <= _response.status_code < 300:
|
|
758
|
+
return pydantic_v1.parse_obj_as(Prompt, _response.json()) # type: ignore
|
|
759
|
+
_response_json = _response.json()
|
|
760
|
+
except JSONDecodeError:
|
|
761
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
762
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
763
|
+
|
|
764
|
+
async def batch_predictions(
    self,
    *,
    modelrun_id: typing.Optional[int] = OMIT,
    results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> PromptsBatchPredictionsResponse:
    """
    Create a new batch prediction.

    Parameters
    ----------
    modelrun_id : typing.Optional[int]
        Model Run ID to associate the prediction with

    results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    PromptsBatchPredictionsResponse


    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.prompts.batch_predictions()
    """
    response = await self._client_wrapper.httpx_client.request(
        "api/model-run/batch-predictions",
        method="POST",
        json={"modelrun_id": modelrun_id, "results": results},
        request_options=request_options,
        omit=OMIT,
    )
    # Decode the body exactly once; an undecodable payload surfaces as an
    # ApiError carrying the raw response text, regardless of status code.
    try:
        payload = response.json()
    except JSONDecodeError:
        raise ApiError(status_code=response.status_code, body=response.text)
    if 200 <= response.status_code < 300:
        # 2xx: validate the decoded body into the typed response model.
        return pydantic_v1.parse_obj_as(PromptsBatchPredictionsResponse, payload)  # type: ignore
    # Non-2xx with a decodable body: propagate the parsed JSON as the error body.
    raise ApiError(status_code=response.status_code, body=payload)
|
|
812
|
+
|
|
813
|
+
async def batch_failed_predictions(
    self,
    *,
    modelrun_id: typing.Optional[int] = OMIT,
    failed_predictions: typing.Optional[
        typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]
    ] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> PromptsBatchFailedPredictionsResponse:
    """
    Create a new batch of failed predictions.

    Parameters
    ----------
    modelrun_id : typing.Optional[int]
        Model Run ID where the failed predictions came from

    failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    PromptsBatchFailedPredictionsResponse


    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.prompts.batch_failed_predictions()
    """
    response = await self._client_wrapper.httpx_client.request(
        "api/model-run/batch-failed-predictions",
        method="POST",
        json={"modelrun_id": modelrun_id, "failed_predictions": failed_predictions},
        request_options=request_options,
        omit=OMIT,
    )
    # Decode the body exactly once; an undecodable payload surfaces as an
    # ApiError carrying the raw response text, regardless of status code.
    try:
        payload = response.json()
    except JSONDecodeError:
        raise ApiError(status_code=response.status_code, body=response.text)
    if 200 <= response.status_code < 300:
        # 2xx: validate the decoded body into the typed response model.
        return pydantic_v1.parse_obj_as(PromptsBatchFailedPredictionsResponse, payload)  # type: ignore
    # Non-2xx with a decodable body: propagate the parsed JSON as the error body.
    raise ApiError(status_code=response.status_code, body=payload)
|