label-studio-sdk 2.0.8__py3-none-any.whl → 2.0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of label-studio-sdk might be problematic. Click here for more details.

Files changed (70) hide show
  1. label_studio_sdk/__init__.py +36 -16
  2. label_studio_sdk/base_client.py +0 -4
  3. label_studio_sdk/core/client_wrapper.py +1 -1
  4. label_studio_sdk/export_storage/__init__.py +13 -2
  5. label_studio_sdk/export_storage/client.py +4 -0
  6. label_studio_sdk/export_storage/databricks/client.py +1406 -0
  7. label_studio_sdk/import_storage/__init__.py +13 -2
  8. label_studio_sdk/import_storage/client.py +4 -0
  9. label_studio_sdk/import_storage/databricks/__init__.py +2 -0
  10. label_studio_sdk/import_storage/databricks/client.py +1466 -0
  11. label_studio_sdk/import_storage/gcswif/client.py +30 -0
  12. label_studio_sdk/projects/__init__.py +0 -2
  13. label_studio_sdk/projects/client.py +186 -32
  14. label_studio_sdk/projects/client_ext.py +20 -8
  15. label_studio_sdk/projects/types/__init__.py +0 -2
  16. label_studio_sdk/projects/types/lse_project_create_request_sampling.py +2 -2
  17. label_studio_sdk/projects/types/patched_lse_project_update_request_sampling.py +2 -2
  18. label_studio_sdk/prompts/client.py +340 -1
  19. label_studio_sdk/prompts/runs/client.py +127 -0
  20. label_studio_sdk/tasks/client.py +7 -2
  21. label_studio_sdk/types/__init__.py +36 -12
  22. label_studio_sdk/types/all_roles_project_list.py +10 -10
  23. label_studio_sdk/types/all_roles_project_list_sampling.py +2 -2
  24. label_studio_sdk/types/azure_blob_import_storage.py +5 -0
  25. label_studio_sdk/types/cancel_model_run_response.py +19 -0
  26. label_studio_sdk/types/configurable_permission_option.py +2 -2
  27. label_studio_sdk/types/databricks_export_storage.py +113 -0
  28. label_studio_sdk/types/databricks_export_storage_request.py +107 -0
  29. label_studio_sdk/types/databricks_import_storage.py +123 -0
  30. label_studio_sdk/types/databricks_import_storage_request.py +117 -0
  31. label_studio_sdk/types/default165enum.py +5 -0
  32. label_studio_sdk/types/gcs_import_storage.py +5 -0
  33. label_studio_sdk/types/gcswif_import_storage.py +5 -0
  34. label_studio_sdk/types/gcswif_import_storage_request.py +5 -0
  35. label_studio_sdk/types/local_files_import_storage.py +5 -0
  36. label_studio_sdk/types/lse_project_counts.py +8 -8
  37. label_studio_sdk/types/lse_project_create_sampling.py +2 -2
  38. label_studio_sdk/types/{project.py → lse_project_response.py} +44 -31
  39. label_studio_sdk/types/lse_project_response_sampling.py +7 -0
  40. label_studio_sdk/types/{project_skip_queue.py → lse_project_response_skip_queue.py} +1 -1
  41. label_studio_sdk/types/lse_project_sampling.py +2 -2
  42. label_studio_sdk/types/lse_project_update_sampling.py +2 -2
  43. label_studio_sdk/types/lse_task.py +6 -0
  44. label_studio_sdk/types/lse_task_serializer_for_reviewers.py +6 -0
  45. label_studio_sdk/types/lse_user.py +1 -0
  46. label_studio_sdk/types/lse_user_api.py +1 -0
  47. label_studio_sdk/types/options165enum.py +5 -0
  48. label_studio_sdk/types/organization_permission.py +7 -4
  49. label_studio_sdk/types/paginated_project_member.py +1 -0
  50. label_studio_sdk/types/paginated_project_subset_tasks_response_list.py +23 -0
  51. label_studio_sdk/types/project_subset_item.py +21 -0
  52. label_studio_sdk/types/project_subset_task_item.py +24 -0
  53. label_studio_sdk/types/project_subset_tasks_response.py +27 -0
  54. label_studio_sdk/types/review_settings.py +14 -0
  55. label_studio_sdk/types/review_settings_request.py +14 -0
  56. label_studio_sdk/types/review_settings_request_sampling.py +8 -0
  57. label_studio_sdk/types/review_settings_sampling.py +8 -0
  58. label_studio_sdk/types/review_settings_sampling_enum.py +5 -0
  59. label_studio_sdk/types/{sampling_enum.py → sampling_de5enum.py} +1 -1
  60. label_studio_sdk/types/who_am_i_user.py +1 -0
  61. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/METADATA +41 -90
  62. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/RECORD +65 -52
  63. label_studio_sdk/blueprints/client.py +0 -272
  64. label_studio_sdk/projects/types/projects_list_request_filter.py +0 -5
  65. label_studio_sdk/types/blueprint.py +0 -41
  66. label_studio_sdk/types/configurable_permission_option_default.py +0 -7
  67. label_studio_sdk/types/project_sampling.py +0 -7
  68. /label_studio_sdk/{blueprints → export_storage/databricks}/__init__.py +0 -0
  69. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/LICENSE +0 -0
  70. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/WHEEL +0 -0
@@ -0,0 +1,1406 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+ from ...core.client_wrapper import SyncClientWrapper
5
+ from ...core.request_options import RequestOptions
6
+ from ...types.databricks_export_storage import DatabricksExportStorage
7
+ from ...core.unchecked_base_model import construct_type
8
+ from json.decoder import JSONDecodeError
9
+ from ...core.api_error import ApiError
10
+ import datetime as dt
11
+ from ...types.status_c5a_enum import StatusC5AEnum
12
+ from ...core.jsonable_encoder import jsonable_encoder
13
+ from ...core.client_wrapper import AsyncClientWrapper
14
+
15
+ # this is used as the default value for optional parameters
16
+ OMIT = typing.cast(typing.Any, ...)
17
+
18
+
19
class DatabricksClient:
    """Synchronous client for the Databricks Files export-storage endpoints
    (``api/storages/export/databricks``)."""

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        # All requests go through the shared HTTP client wrapper, which owns
        # base URL, auth headers and the underlying httpx client.
        self._client_wrapper = client_wrapper
23
    def list(
        self,
        *,
        ordering: typing.Optional[str] = None,
        project: typing.Optional[int] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.List[DatabricksExportStorage]:
        """
        Get a list of all Databricks Files export storage connections.

        Parameters
        ----------
        ordering : typing.Optional[str]
            Which field to use when ordering the results.
        project : typing.Optional[int]
            Project ID to filter by.
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.List[DatabricksExportStorage]

        Raises
        ------
        ApiError
            If the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.list()
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/export/databricks",
            method="GET",
            params={
                "ordering": ordering,
                "project": project,
            },
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    typing.List[DatabricksExportStorage],
                    construct_type(
                        type_=typing.List[DatabricksExportStorage],  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
80
+
81
    def create(
        self,
        *,
        catalog: str,
        host: str,
        project: int,
        schema: str,
        volume: str,
        can_delete_objects: typing.Optional[bool] = OMIT,
        description: typing.Optional[str] = OMIT,
        last_sync: typing.Optional[dt.datetime] = OMIT,
        last_sync_count: typing.Optional[int] = OMIT,
        last_sync_job: typing.Optional[str] = OMIT,
        meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        regex_filter: typing.Optional[str] = OMIT,
        request_timeout_s: typing.Optional[int] = OMIT,
        status: typing.Optional[StatusC5AEnum] = OMIT,
        stream_chunk_bytes: typing.Optional[int] = OMIT,
        synchronizable: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        token: typing.Optional[str] = OMIT,
        traceback: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        verify_tls: typing.Optional[bool] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatabricksExportStorage:
        """
        Create a Databricks Files export storage connection.

        ``catalog``, ``host``, ``project``, ``schema`` and ``volume`` identify
        the Unity Catalog volume and the owning Label Studio project. The
        remaining keyword-only parameters are optional storage fields:
        presentation (``title``, ``description``), sync state (``last_sync``,
        ``last_sync_count``, ``last_sync_job``, ``status``, ``traceback``,
        ``synchronizable``), object filtering (``prefix``, ``regex_filter``),
        and connection settings (``token``, ``request_timeout_s``,
        ``stream_chunk_bytes``, ``use_blob_urls``, ``verify_tls``,
        ``can_delete_objects``). Parameters left as ``OMIT`` are excluded from
        the request body.

        Returns the created :class:`DatabricksExportStorage`.
        Raises :class:`ApiError` if the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.create(
            catalog="catalog",
            host="host",
            project=1,
            schema="schema",
            volume="volume",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/export/databricks",
            method="POST",
            json={
                "can_delete_objects": can_delete_objects,
                "catalog": catalog,
                "description": description,
                "host": host,
                "last_sync": last_sync,
                "last_sync_count": last_sync_count,
                "last_sync_job": last_sync_job,
                "meta": meta,
                "prefix": prefix,
                "project": project,
                "regex_filter": regex_filter,
                "request_timeout_s": request_timeout_s,
                "schema": schema,
                "status": status,
                "stream_chunk_bytes": stream_chunk_bytes,
                "synchronizable": synchronizable,
                "title": title,
                "token": token,
                "traceback": traceback,
                "use_blob_urls": use_blob_urls,
                "verify_tls": verify_tls,
                "volume": volume,
            },
            request_options=request_options,
            # OMIT-valued keys are stripped from the JSON body by the wrapper.
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    DatabricksExportStorage,
                    construct_type(
                        type_=DatabricksExportStorage,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
239
+
240
    def validate(
        self,
        *,
        catalog: str,
        host: str,
        project: int,
        schema: str,
        volume: str,
        can_delete_objects: typing.Optional[bool] = OMIT,
        description: typing.Optional[str] = OMIT,
        last_sync: typing.Optional[dt.datetime] = OMIT,
        last_sync_count: typing.Optional[int] = OMIT,
        last_sync_job: typing.Optional[str] = OMIT,
        meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        regex_filter: typing.Optional[str] = OMIT,
        request_timeout_s: typing.Optional[int] = OMIT,
        status: typing.Optional[StatusC5AEnum] = OMIT,
        stream_chunk_bytes: typing.Optional[int] = OMIT,
        synchronizable: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        token: typing.Optional[str] = OMIT,
        traceback: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        verify_tls: typing.Optional[bool] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> None:
        """
        Validate a specific Databricks Files export storage connection.

        Accepts the same fields as :meth:`create` (required ``catalog``,
        ``host``, ``project``, ``schema``, ``volume`` plus the optional
        storage fields); parameters left as ``OMIT`` are excluded from the
        request body. Returns ``None`` when the server accepts the
        configuration; raises :class:`ApiError` on a non-2xx response.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.validate(
            catalog="catalog",
            host="host",
            project=1,
            schema="schema",
            volume="volume",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/export/databricks/validate",
            method="POST",
            json={
                "can_delete_objects": can_delete_objects,
                "catalog": catalog,
                "description": description,
                "host": host,
                "last_sync": last_sync,
                "last_sync_count": last_sync_count,
                "last_sync_job": last_sync_job,
                "meta": meta,
                "prefix": prefix,
                "project": project,
                "regex_filter": regex_filter,
                "request_timeout_s": request_timeout_s,
                "schema": schema,
                "status": status,
                "stream_chunk_bytes": stream_chunk_bytes,
                "synchronizable": synchronizable,
                "title": title,
                "token": token,
                "traceback": traceback,
                "use_blob_urls": use_blob_urls,
                "verify_tls": verify_tls,
                "volume": volume,
            },
            request_options=request_options,
            # OMIT-valued keys are stripped from the JSON body by the wrapper.
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Success carries no body worth returning.
                return
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
391
+
392
    def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksExportStorage:
        """
        Get a specific Databricks Files export storage connection.

        Parameters
        ----------
        id : int
            Storage connection ID.
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        DatabricksExportStorage

        Raises
        ------
        ApiError
            If the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.get(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/export/databricks/{jsonable_encoder(id)}",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    DatabricksExportStorage,
                    construct_type(
                        type_=DatabricksExportStorage,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
437
+
438
    def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
        """
        Delete a specific Databricks Files export storage connection.

        Parameters
        ----------
        id : int
            Storage connection ID.
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        None

        Raises
        ------
        ApiError
            If the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.delete(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/export/databricks/{jsonable_encoder(id)}",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Success carries no body worth returning.
                return
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
476
+
477
    def update(
        self,
        id: int,
        *,
        can_delete_objects: typing.Optional[bool] = OMIT,
        catalog: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        host: typing.Optional[str] = OMIT,
        last_sync: typing.Optional[dt.datetime] = OMIT,
        last_sync_count: typing.Optional[int] = OMIT,
        last_sync_job: typing.Optional[str] = OMIT,
        meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        project: typing.Optional[int] = OMIT,
        regex_filter: typing.Optional[str] = OMIT,
        request_timeout_s: typing.Optional[int] = OMIT,
        schema: typing.Optional[str] = OMIT,
        status: typing.Optional[StatusC5AEnum] = OMIT,
        stream_chunk_bytes: typing.Optional[int] = OMIT,
        synchronizable: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        token: typing.Optional[str] = OMIT,
        traceback: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        verify_tls: typing.Optional[bool] = OMIT,
        volume: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatabricksExportStorage:
        """
        Update a specific Databricks Files export storage connection.

        Issues a PATCH against storage ``id``; every field is optional and
        only parameters not left as ``OMIT`` are sent, so untouched fields
        keep their server-side values. The accepted fields mirror
        :meth:`create` (``catalog``/``host``/``schema``/``volume``, project
        assignment, presentation, sync state, object filters and connection
        settings).

        Returns the updated :class:`DatabricksExportStorage`.
        Raises :class:`ApiError` if the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.update(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/export/databricks/{jsonable_encoder(id)}",
            method="PATCH",
            json={
                "can_delete_objects": can_delete_objects,
                "catalog": catalog,
                "description": description,
                "host": host,
                "last_sync": last_sync,
                "last_sync_count": last_sync_count,
                "last_sync_job": last_sync_job,
                "meta": meta,
                "prefix": prefix,
                "project": project,
                "regex_filter": regex_filter,
                "request_timeout_s": request_timeout_s,
                "schema": schema,
                "status": status,
                "stream_chunk_bytes": stream_chunk_bytes,
                "synchronizable": synchronizable,
                "title": title,
                "token": token,
                "traceback": traceback,
                "use_blob_urls": use_blob_urls,
                "verify_tls": verify_tls,
                "volume": volume,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            # OMIT-valued keys are stripped from the JSON body by the wrapper.
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    DatabricksExportStorage,
                    construct_type(
                        type_=DatabricksExportStorage,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
637
+
638
    def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksExportStorage:
        """
        Export annotations to a Databricks Files storage.

        Parameters
        ----------
        id : int
            Storage connection ID to sync.
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        DatabricksExportStorage

        Raises
        ------
        ApiError
            If the server responds with a non-2xx status.

        Examples
        --------
        from label_studio_sdk import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.export_storage.databricks.sync(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/export/databricks/{jsonable_encoder(id)}/sync",
            method="POST",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    DatabricksExportStorage,
                    construct_type(
                        type_=DatabricksExportStorage,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
683
+
684
+
685
class AsyncDatabricksClient:
    """Asynchronous counterpart of :class:`DatabricksClient`; same endpoints,
    awaitable methods."""

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        # All requests go through the shared async HTTP client wrapper.
        self._client_wrapper = client_wrapper
688
+
689
    async def list(
        self,
        *,
        ordering: typing.Optional[str] = None,
        project: typing.Optional[int] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.List[DatabricksExportStorage]:
        """
        Get a list of all Databricks Files export storage connections.

        Parameters
        ----------
        ordering : typing.Optional[str]
            Which field to use when ordering the results.
        project : typing.Optional[int]
            Project ID to filter by.
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.List[DatabricksExportStorage]

        Raises
        ------
        ApiError
            If the server responds with a non-2xx status.

        Examples
        --------
        import asyncio

        from label_studio_sdk import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )


        async def main() -> None:
            await client.export_storage.databricks.list()


        asyncio.run(main())
        """
        _response = await self._client_wrapper.httpx_client.request(
            "api/storages/export/databricks",
            method="GET",
            params={
                "ordering": ordering,
                "project": project,
            },
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    typing.List[DatabricksExportStorage],
                    construct_type(
                        type_=typing.List[DatabricksExportStorage],  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
754
+
755
    async def create(
        self,
        *,
        catalog: str,
        host: str,
        project: int,
        schema: str,
        volume: str,
        can_delete_objects: typing.Optional[bool] = OMIT,
        description: typing.Optional[str] = OMIT,
        last_sync: typing.Optional[dt.datetime] = OMIT,
        last_sync_count: typing.Optional[int] = OMIT,
        last_sync_job: typing.Optional[str] = OMIT,
        meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        regex_filter: typing.Optional[str] = OMIT,
        request_timeout_s: typing.Optional[int] = OMIT,
        status: typing.Optional[StatusC5AEnum] = OMIT,
        stream_chunk_bytes: typing.Optional[int] = OMIT,
        synchronizable: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        token: typing.Optional[str] = OMIT,
        traceback: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        verify_tls: typing.Optional[bool] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> DatabricksExportStorage:
        """
        Create a Databricks Files export storage connection.

        Async counterpart of :meth:`DatabricksClient.create`: ``catalog``,
        ``host``, ``project``, ``schema`` and ``volume`` identify the Unity
        Catalog volume and the owning project; the remaining keyword-only
        parameters are optional storage fields (presentation, sync state,
        object filters, connection settings). Parameters left as ``OMIT`` are
        excluded from the request body.

        Returns the created :class:`DatabricksExportStorage`.
        Raises :class:`ApiError` if the server responds with a non-2xx status.

        Examples
        --------
        import asyncio

        from label_studio_sdk import AsyncLabelStudio

        client = AsyncLabelStudio(
            api_key="YOUR_API_KEY",
        )


        async def main() -> None:
            await client.export_storage.databricks.create(
                catalog="catalog",
                host="host",
                project=1,
                schema="schema",
                volume="volume",
            )


        asyncio.run(main())
        """
        _response = await self._client_wrapper.httpx_client.request(
            "api/storages/export/databricks",
            method="POST",
            json={
                "can_delete_objects": can_delete_objects,
                "catalog": catalog,
                "description": description,
                "host": host,
                "last_sync": last_sync,
                "last_sync_count": last_sync_count,
                "last_sync_job": last_sync_job,
                "meta": meta,
                "prefix": prefix,
                "project": project,
                "regex_filter": regex_filter,
                "request_timeout_s": request_timeout_s,
                "schema": schema,
                "status": status,
                "stream_chunk_bytes": stream_chunk_bytes,
                "synchronizable": synchronizable,
                "title": title,
                "token": token,
                "traceback": traceback,
                "use_blob_urls": use_blob_urls,
                "verify_tls": verify_tls,
                "volume": volume,
            },
            request_options=request_options,
            # OMIT-valued keys are stripped from the JSON body by the wrapper.
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return typing.cast(
                    DatabricksExportStorage,
                    construct_type(
                        type_=DatabricksExportStorage,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            # Non-2xx: decode the error body so it can be attached to ApiError.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
921
+
922
+ async def validate(
923
+ self,
924
+ *,
925
+ catalog: str,
926
+ host: str,
927
+ project: int,
928
+ schema: str,
929
+ volume: str,
930
+ can_delete_objects: typing.Optional[bool] = OMIT,
931
+ description: typing.Optional[str] = OMIT,
932
+ last_sync: typing.Optional[dt.datetime] = OMIT,
933
+ last_sync_count: typing.Optional[int] = OMIT,
934
+ last_sync_job: typing.Optional[str] = OMIT,
935
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
936
+ prefix: typing.Optional[str] = OMIT,
937
+ regex_filter: typing.Optional[str] = OMIT,
938
+ request_timeout_s: typing.Optional[int] = OMIT,
939
+ status: typing.Optional[StatusC5AEnum] = OMIT,
940
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
941
+ synchronizable: typing.Optional[bool] = OMIT,
942
+ title: typing.Optional[str] = OMIT,
943
+ token: typing.Optional[str] = OMIT,
944
+ traceback: typing.Optional[str] = OMIT,
945
+ use_blob_urls: typing.Optional[bool] = OMIT,
946
+ verify_tls: typing.Optional[bool] = OMIT,
947
+ request_options: typing.Optional[RequestOptions] = None,
948
+ ) -> None:
949
+ """
950
+ Validate a specific Databricks Files export storage connection.
951
+
952
+ Parameters
953
+ ----------
954
+ catalog : str
955
+ UC catalog name
956
+
957
+ host : str
958
+ Databricks workspace base URL (https://...)
959
+
960
+ project : int
961
+ A unique integer value identifying this project.
962
+
963
+ schema : str
964
+ UC schema name
965
+
966
+ volume : str
967
+ UC volume name
968
+
969
+ can_delete_objects : typing.Optional[bool]
970
+ Deletion from storage enabled
971
+
972
+ description : typing.Optional[str]
973
+ Cloud storage description
974
+
975
+ last_sync : typing.Optional[dt.datetime]
976
+ Last sync finished time
977
+
978
+ last_sync_count : typing.Optional[int]
979
+ Count of tasks synced last time
980
+
981
+ last_sync_job : typing.Optional[str]
982
+ Last sync job ID
983
+
984
+ meta : typing.Optional[typing.Optional[typing.Any]]
985
+
986
+ prefix : typing.Optional[str]
987
+ Export path prefix under the volume
988
+
989
+ regex_filter : typing.Optional[str]
990
+ Regex for filtering objects
991
+
992
+ request_timeout_s : typing.Optional[int]
993
+
994
+ status : typing.Optional[StatusC5AEnum]
995
+
996
+ stream_chunk_bytes : typing.Optional[int]
997
+
998
+ synchronizable : typing.Optional[bool]
999
+
1000
+ title : typing.Optional[str]
1001
+ Cloud storage title
1002
+
1003
+ token : typing.Optional[str]
1004
+
1005
+ traceback : typing.Optional[str]
1006
+ Traceback report for the last failed sync
1007
+
1008
+ use_blob_urls : typing.Optional[bool]
1009
+ Generate blob URLs in tasks
1010
+
1011
+ verify_tls : typing.Optional[bool]
1012
+ Verify TLS certificates
1013
+
1014
+ request_options : typing.Optional[RequestOptions]
1015
+ Request-specific configuration.
1016
+
1017
+ Returns
1018
+ -------
1019
+ None
1020
+
1021
+ Examples
1022
+ --------
1023
+ import asyncio
1024
+
1025
+ from label_studio_sdk import AsyncLabelStudio
1026
+
1027
+ client = AsyncLabelStudio(
1028
+ api_key="YOUR_API_KEY",
1029
+ )
1030
+
1031
+
1032
+ async def main() -> None:
1033
+ await client.export_storage.databricks.validate(
1034
+ catalog="catalog",
1035
+ host="host",
1036
+ project=1,
1037
+ schema="schema",
1038
+ volume="volume",
1039
+ )
1040
+
1041
+
1042
+ asyncio.run(main())
1043
+ """
1044
+ _response = await self._client_wrapper.httpx_client.request(
1045
+ "api/storages/export/databricks/validate",
1046
+ method="POST",
1047
+ json={
1048
+ "can_delete_objects": can_delete_objects,
1049
+ "catalog": catalog,
1050
+ "description": description,
1051
+ "host": host,
1052
+ "last_sync": last_sync,
1053
+ "last_sync_count": last_sync_count,
1054
+ "last_sync_job": last_sync_job,
1055
+ "meta": meta,
1056
+ "prefix": prefix,
1057
+ "project": project,
1058
+ "regex_filter": regex_filter,
1059
+ "request_timeout_s": request_timeout_s,
1060
+ "schema": schema,
1061
+ "status": status,
1062
+ "stream_chunk_bytes": stream_chunk_bytes,
1063
+ "synchronizable": synchronizable,
1064
+ "title": title,
1065
+ "token": token,
1066
+ "traceback": traceback,
1067
+ "use_blob_urls": use_blob_urls,
1068
+ "verify_tls": verify_tls,
1069
+ "volume": volume,
1070
+ },
1071
+ request_options=request_options,
1072
+ omit=OMIT,
1073
+ )
1074
+ try:
1075
+ if 200 <= _response.status_code < 300:
1076
+ return
1077
+ _response_json = _response.json()
1078
+ except JSONDecodeError:
1079
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1080
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1081
+
1082
+ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksExportStorage:
1083
+ """
1084
+ Get a specific Databricks Files export storage connection.
1085
+
1086
+ Parameters
1087
+ ----------
1088
+ id : int
1089
+
1090
+ request_options : typing.Optional[RequestOptions]
1091
+ Request-specific configuration.
1092
+
1093
+ Returns
1094
+ -------
1095
+ DatabricksExportStorage
1096
+
1097
+
1098
+ Examples
1099
+ --------
1100
+ import asyncio
1101
+
1102
+ from label_studio_sdk import AsyncLabelStudio
1103
+
1104
+ client = AsyncLabelStudio(
1105
+ api_key="YOUR_API_KEY",
1106
+ )
1107
+
1108
+
1109
+ async def main() -> None:
1110
+ await client.export_storage.databricks.get(
1111
+ id=1,
1112
+ )
1113
+
1114
+
1115
+ asyncio.run(main())
1116
+ """
1117
+ _response = await self._client_wrapper.httpx_client.request(
1118
+ f"api/storages/export/databricks/{jsonable_encoder(id)}",
1119
+ method="GET",
1120
+ request_options=request_options,
1121
+ )
1122
+ try:
1123
+ if 200 <= _response.status_code < 300:
1124
+ return typing.cast(
1125
+ DatabricksExportStorage,
1126
+ construct_type(
1127
+ type_=DatabricksExportStorage, # type: ignore
1128
+ object_=_response.json(),
1129
+ ),
1130
+ )
1131
+ _response_json = _response.json()
1132
+ except JSONDecodeError:
1133
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1134
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1135
+
1136
+ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
1137
+ """
1138
+ Delete a specific Databricks Files export storage connection.
1139
+
1140
+ Parameters
1141
+ ----------
1142
+ id : int
1143
+
1144
+ request_options : typing.Optional[RequestOptions]
1145
+ Request-specific configuration.
1146
+
1147
+ Returns
1148
+ -------
1149
+ None
1150
+
1151
+ Examples
1152
+ --------
1153
+ import asyncio
1154
+
1155
+ from label_studio_sdk import AsyncLabelStudio
1156
+
1157
+ client = AsyncLabelStudio(
1158
+ api_key="YOUR_API_KEY",
1159
+ )
1160
+
1161
+
1162
+ async def main() -> None:
1163
+ await client.export_storage.databricks.delete(
1164
+ id=1,
1165
+ )
1166
+
1167
+
1168
+ asyncio.run(main())
1169
+ """
1170
+ _response = await self._client_wrapper.httpx_client.request(
1171
+ f"api/storages/export/databricks/{jsonable_encoder(id)}",
1172
+ method="DELETE",
1173
+ request_options=request_options,
1174
+ )
1175
+ try:
1176
+ if 200 <= _response.status_code < 300:
1177
+ return
1178
+ _response_json = _response.json()
1179
+ except JSONDecodeError:
1180
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1181
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1182
+
1183
+ async def update(
1184
+ self,
1185
+ id: int,
1186
+ *,
1187
+ can_delete_objects: typing.Optional[bool] = OMIT,
1188
+ catalog: typing.Optional[str] = OMIT,
1189
+ description: typing.Optional[str] = OMIT,
1190
+ host: typing.Optional[str] = OMIT,
1191
+ last_sync: typing.Optional[dt.datetime] = OMIT,
1192
+ last_sync_count: typing.Optional[int] = OMIT,
1193
+ last_sync_job: typing.Optional[str] = OMIT,
1194
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
1195
+ prefix: typing.Optional[str] = OMIT,
1196
+ project: typing.Optional[int] = OMIT,
1197
+ regex_filter: typing.Optional[str] = OMIT,
1198
+ request_timeout_s: typing.Optional[int] = OMIT,
1199
+ schema: typing.Optional[str] = OMIT,
1200
+ status: typing.Optional[StatusC5AEnum] = OMIT,
1201
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
1202
+ synchronizable: typing.Optional[bool] = OMIT,
1203
+ title: typing.Optional[str] = OMIT,
1204
+ token: typing.Optional[str] = OMIT,
1205
+ traceback: typing.Optional[str] = OMIT,
1206
+ use_blob_urls: typing.Optional[bool] = OMIT,
1207
+ verify_tls: typing.Optional[bool] = OMIT,
1208
+ volume: typing.Optional[str] = OMIT,
1209
+ request_options: typing.Optional[RequestOptions] = None,
1210
+ ) -> DatabricksExportStorage:
1211
+ """
1212
+ Update a specific Databricks Files export storage connection.
1213
+
1214
+ Parameters
1215
+ ----------
1216
+ id : int
1217
+
1218
+ can_delete_objects : typing.Optional[bool]
1219
+ Deletion from storage enabled
1220
+
1221
+ catalog : typing.Optional[str]
1222
+ UC catalog name
1223
+
1224
+ description : typing.Optional[str]
1225
+ Cloud storage description
1226
+
1227
+ host : typing.Optional[str]
1228
+ Databricks workspace base URL (https://...)
1229
+
1230
+ last_sync : typing.Optional[dt.datetime]
1231
+ Last sync finished time
1232
+
1233
+ last_sync_count : typing.Optional[int]
1234
+ Count of tasks synced last time
1235
+
1236
+ last_sync_job : typing.Optional[str]
1237
+ Last sync job ID
1238
+
1239
+ meta : typing.Optional[typing.Optional[typing.Any]]
1240
+
1241
+ prefix : typing.Optional[str]
1242
+ Export path prefix under the volume
1243
+
1244
+ project : typing.Optional[int]
1245
+ A unique integer value identifying this project.
1246
+
1247
+ regex_filter : typing.Optional[str]
1248
+ Regex for filtering objects
1249
+
1250
+ request_timeout_s : typing.Optional[int]
1251
+
1252
+ schema : typing.Optional[str]
1253
+ UC schema name
1254
+
1255
+ status : typing.Optional[StatusC5AEnum]
1256
+
1257
+ stream_chunk_bytes : typing.Optional[int]
1258
+
1259
+ synchronizable : typing.Optional[bool]
1260
+
1261
+ title : typing.Optional[str]
1262
+ Cloud storage title
1263
+
1264
+ token : typing.Optional[str]
1265
+
1266
+ traceback : typing.Optional[str]
1267
+ Traceback report for the last failed sync
1268
+
1269
+ use_blob_urls : typing.Optional[bool]
1270
+ Generate blob URLs in tasks
1271
+
1272
+ verify_tls : typing.Optional[bool]
1273
+ Verify TLS certificates
1274
+
1275
+ volume : typing.Optional[str]
1276
+ UC volume name
1277
+
1278
+ request_options : typing.Optional[RequestOptions]
1279
+ Request-specific configuration.
1280
+
1281
+ Returns
1282
+ -------
1283
+ DatabricksExportStorage
1284
+
1285
+
1286
+ Examples
1287
+ --------
1288
+ import asyncio
1289
+
1290
+ from label_studio_sdk import AsyncLabelStudio
1291
+
1292
+ client = AsyncLabelStudio(
1293
+ api_key="YOUR_API_KEY",
1294
+ )
1295
+
1296
+
1297
+ async def main() -> None:
1298
+ await client.export_storage.databricks.update(
1299
+ id=1,
1300
+ )
1301
+
1302
+
1303
+ asyncio.run(main())
1304
+ """
1305
+ _response = await self._client_wrapper.httpx_client.request(
1306
+ f"api/storages/export/databricks/{jsonable_encoder(id)}",
1307
+ method="PATCH",
1308
+ json={
1309
+ "can_delete_objects": can_delete_objects,
1310
+ "catalog": catalog,
1311
+ "description": description,
1312
+ "host": host,
1313
+ "last_sync": last_sync,
1314
+ "last_sync_count": last_sync_count,
1315
+ "last_sync_job": last_sync_job,
1316
+ "meta": meta,
1317
+ "prefix": prefix,
1318
+ "project": project,
1319
+ "regex_filter": regex_filter,
1320
+ "request_timeout_s": request_timeout_s,
1321
+ "schema": schema,
1322
+ "status": status,
1323
+ "stream_chunk_bytes": stream_chunk_bytes,
1324
+ "synchronizable": synchronizable,
1325
+ "title": title,
1326
+ "token": token,
1327
+ "traceback": traceback,
1328
+ "use_blob_urls": use_blob_urls,
1329
+ "verify_tls": verify_tls,
1330
+ "volume": volume,
1331
+ },
1332
+ headers={
1333
+ "content-type": "application/json",
1334
+ },
1335
+ request_options=request_options,
1336
+ omit=OMIT,
1337
+ )
1338
+ try:
1339
+ if 200 <= _response.status_code < 300:
1340
+ return typing.cast(
1341
+ DatabricksExportStorage,
1342
+ construct_type(
1343
+ type_=DatabricksExportStorage, # type: ignore
1344
+ object_=_response.json(),
1345
+ ),
1346
+ )
1347
+ _response_json = _response.json()
1348
+ except JSONDecodeError:
1349
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1350
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1351
+
1352
+ async def sync(
1353
+ self, id: int, *, request_options: typing.Optional[RequestOptions] = None
1354
+ ) -> DatabricksExportStorage:
1355
+ """
1356
+ Export annotations to a Databricks Files storage.
1357
+
1358
+ Parameters
1359
+ ----------
1360
+ id : int
1361
+
1362
+ request_options : typing.Optional[RequestOptions]
1363
+ Request-specific configuration.
1364
+
1365
+ Returns
1366
+ -------
1367
+ DatabricksExportStorage
1368
+
1369
+
1370
+ Examples
1371
+ --------
1372
+ import asyncio
1373
+
1374
+ from label_studio_sdk import AsyncLabelStudio
1375
+
1376
+ client = AsyncLabelStudio(
1377
+ api_key="YOUR_API_KEY",
1378
+ )
1379
+
1380
+
1381
+ async def main() -> None:
1382
+ await client.export_storage.databricks.sync(
1383
+ id=1,
1384
+ )
1385
+
1386
+
1387
+ asyncio.run(main())
1388
+ """
1389
+ _response = await self._client_wrapper.httpx_client.request(
1390
+ f"api/storages/export/databricks/{jsonable_encoder(id)}/sync",
1391
+ method="POST",
1392
+ request_options=request_options,
1393
+ )
1394
+ try:
1395
+ if 200 <= _response.status_code < 300:
1396
+ return typing.cast(
1397
+ DatabricksExportStorage,
1398
+ construct_type(
1399
+ type_=DatabricksExportStorage, # type: ignore
1400
+ object_=_response.json(),
1401
+ ),
1402
+ )
1403
+ _response_json = _response.json()
1404
+ except JSONDecodeError:
1405
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1406
+ raise ApiError(status_code=_response.status_code, body=_response_json)