label-studio-sdk 1.0.5__py3-none-any.whl → 1.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release.


This version of label-studio-sdk might be problematic.

Files changed (77)
  1. label_studio_sdk/__init__.py +76 -0
  2. label_studio_sdk/_extensions/eval/categorical.py +83 -0
  3. label_studio_sdk/_extensions/label_studio_tools/core/label_config.py +13 -4
  4. label_studio_sdk/_extensions/label_studio_tools/core/utils/io.py +35 -17
  5. label_studio_sdk/_extensions/label_studio_tools/core/utils/json_schema.py +86 -0
  6. label_studio_sdk/_legacy/schema/label_config_schema.json +42 -11
  7. label_studio_sdk/annotations/__init__.py +3 -0
  8. label_studio_sdk/annotations/client.py +109 -0
  9. label_studio_sdk/annotations/types/__init__.py +5 -0
  10. label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +29 -0
  11. label_studio_sdk/base_client.py +9 -0
  12. label_studio_sdk/comments/__init__.py +2 -0
  13. label_studio_sdk/comments/client.py +512 -0
  14. label_studio_sdk/converter/converter.py +11 -4
  15. label_studio_sdk/converter/imports/coco.py +14 -13
  16. label_studio_sdk/converter/utils.py +72 -3
  17. label_studio_sdk/core/client_wrapper.py +1 -1
  18. label_studio_sdk/files/client.py +26 -16
  19. label_studio_sdk/label_interface/control_tags.py +205 -10
  20. label_studio_sdk/label_interface/interface.py +117 -10
  21. label_studio_sdk/label_interface/region.py +1 -10
  22. label_studio_sdk/model_providers/__init__.py +2 -0
  23. label_studio_sdk/model_providers/client.py +708 -0
  24. label_studio_sdk/projects/client.py +32 -16
  25. label_studio_sdk/projects/exports/client.py +133 -40
  26. label_studio_sdk/prompts/__init__.py +21 -0
  27. label_studio_sdk/prompts/client.py +862 -0
  28. label_studio_sdk/prompts/indicators/__init__.py +2 -0
  29. label_studio_sdk/prompts/indicators/client.py +194 -0
  30. label_studio_sdk/prompts/runs/__init__.py +5 -0
  31. label_studio_sdk/prompts/runs/client.py +354 -0
  32. label_studio_sdk/prompts/runs/types/__init__.py +5 -0
  33. label_studio_sdk/prompts/runs/types/runs_list_request_project_subset.py +5 -0
  34. label_studio_sdk/prompts/types/__init__.py +15 -0
  35. label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +42 -0
  36. label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +29 -0
  37. label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +62 -0
  38. label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +29 -0
  39. label_studio_sdk/prompts/versions/__init__.py +2 -0
  40. label_studio_sdk/prompts/versions/client.py +1046 -0
  41. label_studio_sdk/types/__init__.py +58 -0
  42. label_studio_sdk/types/comment.py +39 -0
  43. label_studio_sdk/types/comment_created_by.py +5 -0
  44. label_studio_sdk/types/inference_run.py +43 -0
  45. label_studio_sdk/types/inference_run_cost_estimate.py +57 -0
  46. label_studio_sdk/types/inference_run_created_by.py +5 -0
  47. label_studio_sdk/types/inference_run_organization.py +5 -0
  48. label_studio_sdk/types/inference_run_project_subset.py +5 -0
  49. label_studio_sdk/types/inference_run_status.py +7 -0
  50. label_studio_sdk/types/key_indicator_value.py +30 -0
  51. label_studio_sdk/types/key_indicators.py +7 -0
  52. label_studio_sdk/types/key_indicators_item.py +51 -0
  53. label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +37 -0
  54. label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +37 -0
  55. label_studio_sdk/types/model_provider_connection.py +71 -0
  56. label_studio_sdk/types/model_provider_connection_budget_reset_period.py +5 -0
  57. label_studio_sdk/types/model_provider_connection_created_by.py +5 -0
  58. label_studio_sdk/types/model_provider_connection_organization.py +5 -0
  59. label_studio_sdk/types/model_provider_connection_provider.py +5 -0
  60. label_studio_sdk/types/model_provider_connection_scope.py +5 -0
  61. label_studio_sdk/types/prompt.py +79 -0
  62. label_studio_sdk/types/prompt_created_by.py +5 -0
  63. label_studio_sdk/types/prompt_organization.py +5 -0
  64. label_studio_sdk/types/prompt_version.py +41 -0
  65. label_studio_sdk/types/prompt_version_created_by.py +5 -0
  66. label_studio_sdk/types/prompt_version_organization.py +5 -0
  67. label_studio_sdk/types/prompt_version_provider.py +5 -0
  68. label_studio_sdk/types/refined_prompt_response.py +64 -0
  69. label_studio_sdk/types/refined_prompt_response_refinement_status.py +7 -0
  70. label_studio_sdk/types/task.py +3 -2
  71. label_studio_sdk/types/task_comment_authors_item.py +5 -0
  72. label_studio_sdk/webhooks/client.py +245 -36
  73. label_studio_sdk/workspaces/client.py +20 -20
  74. label_studio_sdk-1.0.8.dist-info/LICENSE +201 -0
  75. {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.8.dist-info}/METADATA +19 -3
  76. {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.8.dist-info}/RECORD +77 -24
  77. {label_studio_sdk-1.0.5.dist-info → label_studio_sdk-1.0.8.dist-info}/WHEEL +1 -1
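The bulk of the new surface in 1.0.8 is the prompts API; the only hunk reproduced in full below is item 40 above, the new label_studio_sdk/prompts/versions/client.py. As a quick orientation, here is a minimal sketch of calling the new synchronous methods, mirroring the generated docstring examples in that file (the API key and all IDs are placeholders):

    from label_studio_sdk.client import LabelStudio

    # Placeholder credentials and IDs, taken from the generated docstrings below.
    client = LabelStudio(api_key="YOUR_API_KEY")

    # List the versions of prompt 1, then estimate the cost of running version 1
    # of that prompt on project 1 (project_subset selects e.g. All/Sample/HasGT).
    versions = client.prompts.versions.list(id=1)
    estimate = client.prompts.versions.cost_estimate(
        prompt_id=1,
        version_id=1,
        project_id=1,
        project_subset=1,
    )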
label_studio_sdk/prompts/versions/client.py (new file)
@@ -0,0 +1,1046 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.pydantic_utilities import pydantic_v1
+from ...core.request_options import RequestOptions
+from ...types.inference_run_cost_estimate import InferenceRunCostEstimate
+from ...types.prompt_version import PromptVersion
+from ...types.prompt_version_created_by import PromptVersionCreatedBy
+from ...types.prompt_version_organization import PromptVersionOrganization
+from ...types.prompt_version_provider import PromptVersionProvider
+from ...types.refined_prompt_response import RefinedPromptResponse
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class VersionsClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[PromptVersion]:
+        """
+        Get a list of prompt versions.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        typing.List[PromptVersion]
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.list(
+            id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def create(
+        self,
+        id: int,
+        *,
+        title: typing.Optional[str] = OMIT,
+        parent_model: typing.Optional[int] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        prompt: typing.Optional[str] = OMIT,
+        provider: typing.Optional[PromptVersionProvider] = OMIT,
+        provider_model_id: typing.Optional[str] = OMIT,
+        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
+        created_at: typing.Optional[dt.datetime] = OMIT,
+        updated_at: typing.Optional[dt.datetime] = OMIT,
+        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PromptVersion:
+        """
+        Create a new version of a prompt.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        title : typing.Optional[str]
+
+        parent_model : typing.Optional[int]
+
+        model_provider_connection : typing.Optional[int]
+
+        prompt : typing.Optional[str]
+
+        provider : typing.Optional[PromptVersionProvider]
+
+        provider_model_id : typing.Optional[str]
+
+        created_by : typing.Optional[PromptVersionCreatedBy]
+
+        created_at : typing.Optional[dt.datetime]
+
+        updated_at : typing.Optional[dt.datetime]
+
+        organization : typing.Optional[PromptVersionOrganization]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.create(
+            id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions",
+            method="POST",
+            json={
+                "title": title,
+                "parent_model": parent_model,
+                "model_provider_connection": model_provider_connection,
+                "prompt": prompt,
+                "provider": provider,
+                "provider_model_id": provider_model_id,
+                "created_by": created_by,
+                "created_at": created_at,
+                "updated_at": updated_at,
+                "organization": organization,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get(
+        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> PromptVersion:
+        """
+        Get a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.get(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete(self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
+        """
+        Delete a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        None
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.delete(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="DELETE",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update(
+        self,
+        id: int,
+        version_id: int,
+        *,
+        title: typing.Optional[str] = OMIT,
+        parent_model: typing.Optional[int] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        prompt: typing.Optional[str] = OMIT,
+        provider: typing.Optional[PromptVersionProvider] = OMIT,
+        provider_model_id: typing.Optional[str] = OMIT,
+        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
+        created_at: typing.Optional[dt.datetime] = OMIT,
+        updated_at: typing.Optional[dt.datetime] = OMIT,
+        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PromptVersion:
+        """
+        Update a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        title : typing.Optional[str]
+
+        parent_model : typing.Optional[int]
+
+        model_provider_connection : typing.Optional[int]
+
+        prompt : typing.Optional[str]
+
+        provider : typing.Optional[PromptVersionProvider]
+
+        provider_model_id : typing.Optional[str]
+
+        created_by : typing.Optional[PromptVersionCreatedBy]
+
+        created_at : typing.Optional[dt.datetime]
+
+        updated_at : typing.Optional[dt.datetime]
+
+        organization : typing.Optional[PromptVersionOrganization]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.update(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="PATCH",
+            json={
+                "title": title,
+                "parent_model": parent_model,
+                "model_provider_connection": model_provider_connection,
+                "prompt": prompt,
+                "provider": provider,
+                "provider_model_id": provider_model_id,
+                "created_by": created_by,
+                "created_at": created_at,
+                "updated_at": updated_at,
+                "organization": organization,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def cost_estimate(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        project_id: int,
+        project_subset: int,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> InferenceRunCostEstimate:
+        """
+        Get cost estimate for running a prompt version on a particular project/subset
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        project_id : int
+            ID of the project to get an estimate for running on
+
+        project_subset : int
+            Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT')
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        InferenceRunCostEstimate
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.cost_estimate(
+            prompt_id=1,
+            version_id=1,
+            project_id=1,
+            project_subset=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate",
+            method="POST",
+            params={"project_id": project_id, "project_subset": project_subset},
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(InferenceRunCostEstimate, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_refined_prompt(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        refinement_job_id: str,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> RefinedPromptResponse:
+        """
+        Get the refined prompt based on the `refinement_job_id`.
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        refinement_job_id : str
+            Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        RefinedPromptResponse
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.get_refined_prompt(
+            prompt_id=1,
+            version_id=1,
+            refinement_job_id="refinement_job_id",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
+            method="GET",
+            params={"refinement_job_id": refinement_job_id},
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def refine_prompt(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        async_: typing.Optional[bool] = None,
+        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
+        teacher_model_name: typing.Optional[str] = OMIT,
+        project_id: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> RefinedPromptResponse:
+        """
+        Refine a prompt version using a teacher model and save the refined prompt as a new version.
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Base Prompt Version ID
+
+        async_ : typing.Optional[bool]
+            Run the refinement job asynchronously
+
+        teacher_model_provider_connection_id : typing.Optional[int]
+            Model Provider Connection ID to use to refine the prompt
+
+        teacher_model_name : typing.Optional[str]
+            Name of the model to use to refine the prompt
+
+        project_id : typing.Optional[int]
+            Project ID to target the refined prompt for
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        RefinedPromptResponse
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.refine_prompt(
+            prompt_id=1,
+            version_id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
+            method="POST",
+            params={"async": async_},
+            json={
+                "teacher_model_provider_connection_id": teacher_model_provider_connection_id,
+                "teacher_model_name": teacher_model_name,
+                "project_id": project_id,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncVersionsClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def list(
+        self, id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[PromptVersion]:
+        """
+        Get a list of prompt versions.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        typing.List[PromptVersion]
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.list(
+            id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions", method="GET", request_options=request_options
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(typing.List[PromptVersion], _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def create(
+        self,
+        id: int,
+        *,
+        title: typing.Optional[str] = OMIT,
+        parent_model: typing.Optional[int] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        prompt: typing.Optional[str] = OMIT,
+        provider: typing.Optional[PromptVersionProvider] = OMIT,
+        provider_model_id: typing.Optional[str] = OMIT,
+        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
+        created_at: typing.Optional[dt.datetime] = OMIT,
+        updated_at: typing.Optional[dt.datetime] = OMIT,
+        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PromptVersion:
+        """
+        Create a new version of a prompt.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        title : typing.Optional[str]
+
+        parent_model : typing.Optional[int]
+
+        model_provider_connection : typing.Optional[int]
+
+        prompt : typing.Optional[str]
+
+        provider : typing.Optional[PromptVersionProvider]
+
+        provider_model_id : typing.Optional[str]
+
+        created_by : typing.Optional[PromptVersionCreatedBy]
+
+        created_at : typing.Optional[dt.datetime]
+
+        updated_at : typing.Optional[dt.datetime]
+
+        organization : typing.Optional[PromptVersionOrganization]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.create(
+            id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions",
+            method="POST",
+            json={
+                "title": title,
+                "parent_model": parent_model,
+                "model_provider_connection": model_provider_connection,
+                "prompt": prompt,
+                "provider": provider,
+                "provider_model_id": provider_model_id,
+                "created_by": created_by,
+                "created_at": created_at,
+                "updated_at": updated_at,
+                "organization": organization,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get(
+        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> PromptVersion:
+        """
+        Get a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.get(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete(
+        self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> None:
+        """
+        Delete a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        None
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.delete(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="DELETE",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def update(
+        self,
+        id: int,
+        version_id: int,
+        *,
+        title: typing.Optional[str] = OMIT,
+        parent_model: typing.Optional[int] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        prompt: typing.Optional[str] = OMIT,
+        provider: typing.Optional[PromptVersionProvider] = OMIT,
+        provider_model_id: typing.Optional[str] = OMIT,
+        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
+        created_at: typing.Optional[dt.datetime] = OMIT,
+        updated_at: typing.Optional[dt.datetime] = OMIT,
+        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PromptVersion:
+        """
+        Update a prompt version by ID.
+
+        Parameters
+        ----------
+        id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        title : typing.Optional[str]
+
+        parent_model : typing.Optional[int]
+
+        model_provider_connection : typing.Optional[int]
+
+        prompt : typing.Optional[str]
+
+        provider : typing.Optional[PromptVersionProvider]
+
+        provider_model_id : typing.Optional[str]
+
+        created_by : typing.Optional[PromptVersionCreatedBy]
+
+        created_at : typing.Optional[dt.datetime]
+
+        updated_at : typing.Optional[dt.datetime]
+
+        organization : typing.Optional[PromptVersionOrganization]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PromptVersion
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.update(
+            id=1,
+            version_id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}",
+            method="PATCH",
+            json={
+                "title": title,
+                "parent_model": parent_model,
+                "model_provider_connection": model_provider_connection,
+                "prompt": prompt,
+                "provider": provider,
+                "provider_model_id": provider_model_id,
+                "created_by": created_by,
+                "created_at": created_at,
+                "updated_at": updated_at,
+                "organization": organization,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(PromptVersion, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def cost_estimate(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        project_id: int,
+        project_subset: int,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> InferenceRunCostEstimate:
+        """
+        Get cost estimate for running a prompt version on a particular project/subset
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        project_id : int
+            ID of the project to get an estimate for running on
+
+        project_subset : int
+            Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT')
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        InferenceRunCostEstimate
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.cost_estimate(
+            prompt_id=1,
+            version_id=1,
+            project_id=1,
+            project_subset=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate",
+            method="POST",
+            params={"project_id": project_id, "project_subset": project_subset},
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(InferenceRunCostEstimate, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get_refined_prompt(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        refinement_job_id: str,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> RefinedPromptResponse:
+        """
+        Get the refined prompt based on the `refinement_job_id`.
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Prompt Version ID
+
+        refinement_job_id : str
+            Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        RefinedPromptResponse
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.get_refined_prompt(
+            prompt_id=1,
+            version_id=1,
+            refinement_job_id="refinement_job_id",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
+            method="GET",
+            params={"refinement_job_id": refinement_job_id},
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def refine_prompt(
+        self,
+        prompt_id: int,
+        version_id: int,
+        *,
+        async_: typing.Optional[bool] = None,
+        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
+        teacher_model_name: typing.Optional[str] = OMIT,
+        project_id: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> RefinedPromptResponse:
+        """
+        Refine a prompt version using a teacher model and save the refined prompt as a new version.
+
+        Parameters
+        ----------
+        prompt_id : int
+            Prompt ID
+
+        version_id : int
+            Base Prompt Version ID
+
+        async_ : typing.Optional[bool]
+            Run the refinement job asynchronously
+
+        teacher_model_provider_connection_id : typing.Optional[int]
+            Model Provider Connection ID to use to refine the prompt
+
+        teacher_model_name : typing.Optional[str]
+            Name of the model to use to refine the prompt
+
+        project_id : typing.Optional[int]
+            Project ID to target the refined prompt for
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        RefinedPromptResponse
+
+
+        Examples
+        --------
+        from label_studio_sdk.client import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        await client.prompts.versions.refine_prompt(
+            prompt_id=1,
+            version_id=1,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine",
+            method="POST",
+            params={"async": async_},
+            json={
+                "teacher_model_provider_connection_id": teacher_model_provider_connection_id,
+                "teacher_model_name": teacher_model_name,
+                "project_id": project_id,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return pydantic_v1.parse_obj_as(RefinedPromptResponse, _response.json())  # type: ignore
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
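
Every method above has an async counterpart on AsyncVersionsClient, and the generated docstrings only show the bare await calls. A minimal sketch of driving them from a script, assuming the usual asyncio event-loop wrapper (IDs and API key are again placeholders):

    import asyncio

    from label_studio_sdk.client import AsyncLabelStudio


    async def main() -> None:
        # Placeholder credentials; mirrors the AsyncLabelStudio docstring examples above.
        client = AsyncLabelStudio(api_key="YOUR_API_KEY")
        versions = await client.prompts.versions.list(id=1)
        version = await client.prompts.versions.get(id=1, version_id=1)
        print(len(versions), version)


    asyncio.run(main())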