label-studio-sdk 2.0.8__py3-none-any.whl → 2.0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of label-studio-sdk might be problematic.

Files changed (70)
  1. label_studio_sdk/__init__.py +36 -16
  2. label_studio_sdk/base_client.py +0 -4
  3. label_studio_sdk/core/client_wrapper.py +1 -1
  4. label_studio_sdk/export_storage/__init__.py +13 -2
  5. label_studio_sdk/export_storage/client.py +4 -0
  6. label_studio_sdk/export_storage/databricks/client.py +1406 -0
  7. label_studio_sdk/import_storage/__init__.py +13 -2
  8. label_studio_sdk/import_storage/client.py +4 -0
  9. label_studio_sdk/import_storage/databricks/__init__.py +2 -0
  10. label_studio_sdk/import_storage/databricks/client.py +1466 -0
  11. label_studio_sdk/import_storage/gcswif/client.py +30 -0
  12. label_studio_sdk/projects/__init__.py +0 -2
  13. label_studio_sdk/projects/client.py +186 -32
  14. label_studio_sdk/projects/client_ext.py +20 -8
  15. label_studio_sdk/projects/types/__init__.py +0 -2
  16. label_studio_sdk/projects/types/lse_project_create_request_sampling.py +2 -2
  17. label_studio_sdk/projects/types/patched_lse_project_update_request_sampling.py +2 -2
  18. label_studio_sdk/prompts/client.py +340 -1
  19. label_studio_sdk/prompts/runs/client.py +127 -0
  20. label_studio_sdk/tasks/client.py +7 -2
  21. label_studio_sdk/types/__init__.py +36 -12
  22. label_studio_sdk/types/all_roles_project_list.py +10 -10
  23. label_studio_sdk/types/all_roles_project_list_sampling.py +2 -2
  24. label_studio_sdk/types/azure_blob_import_storage.py +5 -0
  25. label_studio_sdk/types/cancel_model_run_response.py +19 -0
  26. label_studio_sdk/types/configurable_permission_option.py +2 -2
  27. label_studio_sdk/types/databricks_export_storage.py +113 -0
  28. label_studio_sdk/types/databricks_export_storage_request.py +107 -0
  29. label_studio_sdk/types/databricks_import_storage.py +123 -0
  30. label_studio_sdk/types/databricks_import_storage_request.py +117 -0
  31. label_studio_sdk/types/default165enum.py +5 -0
  32. label_studio_sdk/types/gcs_import_storage.py +5 -0
  33. label_studio_sdk/types/gcswif_import_storage.py +5 -0
  34. label_studio_sdk/types/gcswif_import_storage_request.py +5 -0
  35. label_studio_sdk/types/local_files_import_storage.py +5 -0
  36. label_studio_sdk/types/lse_project_counts.py +8 -8
  37. label_studio_sdk/types/lse_project_create_sampling.py +2 -2
  38. label_studio_sdk/types/{project.py → lse_project_response.py} +44 -31
  39. label_studio_sdk/types/lse_project_response_sampling.py +7 -0
  40. label_studio_sdk/types/{project_skip_queue.py → lse_project_response_skip_queue.py} +1 -1
  41. label_studio_sdk/types/lse_project_sampling.py +2 -2
  42. label_studio_sdk/types/lse_project_update_sampling.py +2 -2
  43. label_studio_sdk/types/lse_task.py +6 -0
  44. label_studio_sdk/types/lse_task_serializer_for_reviewers.py +6 -0
  45. label_studio_sdk/types/lse_user.py +1 -0
  46. label_studio_sdk/types/lse_user_api.py +1 -0
  47. label_studio_sdk/types/options165enum.py +5 -0
  48. label_studio_sdk/types/organization_permission.py +7 -4
  49. label_studio_sdk/types/paginated_project_member.py +1 -0
  50. label_studio_sdk/types/paginated_project_subset_tasks_response_list.py +23 -0
  51. label_studio_sdk/types/project_subset_item.py +21 -0
  52. label_studio_sdk/types/project_subset_task_item.py +24 -0
  53. label_studio_sdk/types/project_subset_tasks_response.py +27 -0
  54. label_studio_sdk/types/review_settings.py +14 -0
  55. label_studio_sdk/types/review_settings_request.py +14 -0
  56. label_studio_sdk/types/review_settings_request_sampling.py +8 -0
  57. label_studio_sdk/types/review_settings_sampling.py +8 -0
  58. label_studio_sdk/types/review_settings_sampling_enum.py +5 -0
  59. label_studio_sdk/types/{sampling_enum.py → sampling_de5enum.py} +1 -1
  60. label_studio_sdk/types/who_am_i_user.py +1 -0
  61. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/METADATA +41 -90
  62. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/RECORD +65 -52
  63. label_studio_sdk/blueprints/client.py +0 -272
  64. label_studio_sdk/projects/types/projects_list_request_filter.py +0 -5
  65. label_studio_sdk/types/blueprint.py +0 -41
  66. label_studio_sdk/types/configurable_permission_option_default.py +0 -7
  67. label_studio_sdk/types/project_sampling.py +0 -7
  68. /label_studio_sdk/{blueprints → export_storage/databricks}/__init__.py +0 -0
  69. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/LICENSE +0 -0
  70. {label_studio_sdk-2.0.8.dist-info → label_studio_sdk-2.0.9.dist-info}/WHEEL +0 -0
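
The largest additions in this release are the new Databricks (Unity Catalog volume) import and export storage clients (files 6, 9, 10 and 27-30 above). Based on the generated client shown in the hunk below, a minimal, untested usage sketch for the import side might look like the following; the `id` attribute on the returned DatabricksImportStorage object is assumed from the response model, which is not shown in this diff, and the host value is a hypothetical workspace URL:

    from label_studio_sdk import LabelStudio

    client = LabelStudio(api_key="YOUR_API_KEY")

    # Connect a Unity Catalog volume as an import (source) storage
    storage = client.import_storage.databricks.create(
        catalog="main",          # UC catalog name
        schema="default",        # UC schema name
        volume="labeling_data",  # UC volume name
        host="https://my-workspace.cloud.databricks.com",  # workspace base URL (hypothetical)
        project=1,
        prefix="tasks/",         # optional path under the volume
    )

    # Trigger a sync of tasks from the volume
    client.import_storage.databricks.sync(id=storage.id)

The hunk below is the new file label_studio_sdk/import_storage/databricks/client.py (+1466 lines).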
@@ -0,0 +1,1466 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+ from ...core.client_wrapper import SyncClientWrapper
5
+ from ...core.request_options import RequestOptions
6
+ from ...types.databricks_import_storage import DatabricksImportStorage
7
+ from ...core.unchecked_base_model import construct_type
8
+ from json.decoder import JSONDecodeError
9
+ from ...core.api_error import ApiError
10
+ import datetime as dt
11
+ from ...types.status_c5a_enum import StatusC5AEnum
12
+ from ...core.jsonable_encoder import jsonable_encoder
13
+ from ...core.client_wrapper import AsyncClientWrapper
14
+
15
+ # this is used as the default value for optional parameters
16
+ OMIT = typing.cast(typing.Any, ...)
17
+
18
+
19
+ class DatabricksClient:
20
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
21
+ self._client_wrapper = client_wrapper
22
+
23
+ def list(
24
+ self,
25
+ *,
26
+ ordering: typing.Optional[str] = None,
27
+ project: typing.Optional[int] = None,
28
+ request_options: typing.Optional[RequestOptions] = None,
29
+ ) -> typing.List[DatabricksImportStorage]:
30
+ """
31
+ Get list of all Databricks Files import storage connections.
32
+
33
+ Parameters
34
+ ----------
35
+ ordering : typing.Optional[str]
36
+ Which field to use when ordering the results.
37
+
38
+ project : typing.Optional[int]
39
+ Project ID
40
+
41
+ request_options : typing.Optional[RequestOptions]
42
+ Request-specific configuration.
43
+
44
+ Returns
45
+ -------
46
+ typing.List[DatabricksImportStorage]
47
+
48
+
49
+ Examples
50
+ --------
51
+ from label_studio_sdk import LabelStudio
52
+
53
+ client = LabelStudio(
54
+ api_key="YOUR_API_KEY",
55
+ )
56
+ client.import_storage.databricks.list()
57
+ """
58
+ _response = self._client_wrapper.httpx_client.request(
59
+ "api/storages/databricks/",
60
+ method="GET",
61
+ params={
62
+ "ordering": ordering,
63
+ "project": project,
64
+ },
65
+ request_options=request_options,
66
+ )
67
+ try:
68
+ if 200 <= _response.status_code < 300:
69
+ return typing.cast(
70
+ typing.List[DatabricksImportStorage],
71
+ construct_type(
72
+ type_=typing.List[DatabricksImportStorage], # type: ignore
73
+ object_=_response.json(),
74
+ ),
75
+ )
76
+ _response_json = _response.json()
77
+ except JSONDecodeError:
78
+ raise ApiError(status_code=_response.status_code, body=_response.text)
79
+ raise ApiError(status_code=_response.status_code, body=_response_json)
80
+
81
+ def create(
82
+ self,
83
+ *,
84
+ catalog: str,
85
+ host: str,
86
+ project: int,
87
+ schema: str,
88
+ volume: str,
89
+ description: typing.Optional[str] = OMIT,
90
+ last_sync: typing.Optional[dt.datetime] = OMIT,
91
+ last_sync_count: typing.Optional[int] = OMIT,
92
+ last_sync_job: typing.Optional[str] = OMIT,
93
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
94
+ prefix: typing.Optional[str] = OMIT,
95
+ presign: typing.Optional[bool] = OMIT,
96
+ presign_ttl: typing.Optional[int] = OMIT,
97
+ recursive_scan: typing.Optional[bool] = OMIT,
98
+ regex_filter: typing.Optional[str] = OMIT,
99
+ request_timeout_s: typing.Optional[int] = OMIT,
100
+ status: typing.Optional[StatusC5AEnum] = OMIT,
101
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
102
+ synchronizable: typing.Optional[bool] = OMIT,
103
+ title: typing.Optional[str] = OMIT,
104
+ token: typing.Optional[str] = OMIT,
105
+ traceback: typing.Optional[str] = OMIT,
106
+ use_blob_urls: typing.Optional[bool] = OMIT,
107
+ verify_tls: typing.Optional[bool] = OMIT,
108
+ request_options: typing.Optional[RequestOptions] = None,
109
+ ) -> DatabricksImportStorage:
110
+ """
111
+ Create a Databricks Files import storage connection.
112
+
113
+ Parameters
114
+ ----------
115
+ catalog : str
116
+ UC catalog name
117
+
118
+ host : str
119
+ Databricks workspace base URL (https://...)
120
+
121
+ project : int
122
+ A unique integer value identifying this project.
123
+
124
+ schema : str
125
+ UC schema name
126
+
127
+ volume : str
128
+ UC volume name
129
+
130
+ description : typing.Optional[str]
131
+ Cloud storage description
132
+
133
+ last_sync : typing.Optional[dt.datetime]
134
+ Last sync finished time
135
+
136
+ last_sync_count : typing.Optional[int]
137
+ Count of tasks synced last time
138
+
139
+ last_sync_job : typing.Optional[str]
140
+ Last sync job ID
141
+
142
+ meta : typing.Optional[typing.Optional[typing.Any]]
143
+
144
+ prefix : typing.Optional[str]
145
+ Path under the volume
146
+
147
+ presign : typing.Optional[bool]
148
+ Presign not supported; always proxied
149
+
150
+ presign_ttl : typing.Optional[int]
151
+ Unused for Databricks; kept for compatibility
152
+
153
+ recursive_scan : typing.Optional[bool]
154
+ Perform recursive scan
155
+
156
+ regex_filter : typing.Optional[str]
157
+ Regex for filtering objects
158
+
159
+ request_timeout_s : typing.Optional[int]
160
+
161
+ status : typing.Optional[StatusC5AEnum]
162
+
163
+ stream_chunk_bytes : typing.Optional[int]
164
+
165
+ synchronizable : typing.Optional[bool]
166
+
167
+ title : typing.Optional[str]
168
+ Cloud storage title
169
+
170
+ token : typing.Optional[str]
171
+
172
+ traceback : typing.Optional[str]
173
+ Traceback report for the last failed sync
174
+
175
+ use_blob_urls : typing.Optional[bool]
176
+ Generate blob URLs in tasks
177
+
178
+ verify_tls : typing.Optional[bool]
179
+ Verify TLS certificates
180
+
181
+ request_options : typing.Optional[RequestOptions]
182
+ Request-specific configuration.
183
+
184
+ Returns
185
+ -------
186
+ DatabricksImportStorage
187
+
188
+
189
+ Examples
190
+ --------
191
+ from label_studio_sdk import LabelStudio
192
+
193
+ client = LabelStudio(
194
+ api_key="YOUR_API_KEY",
195
+ )
196
+ client.import_storage.databricks.create(
197
+ catalog="catalog",
198
+ host="host",
199
+ project=1,
200
+ schema="schema",
201
+ volume="volume",
202
+ )
203
+ """
204
+ _response = self._client_wrapper.httpx_client.request(
205
+ "api/storages/databricks/",
206
+ method="POST",
207
+ json={
208
+ "catalog": catalog,
209
+ "description": description,
210
+ "host": host,
211
+ "last_sync": last_sync,
212
+ "last_sync_count": last_sync_count,
213
+ "last_sync_job": last_sync_job,
214
+ "meta": meta,
215
+ "prefix": prefix,
216
+ "presign": presign,
217
+ "presign_ttl": presign_ttl,
218
+ "project": project,
219
+ "recursive_scan": recursive_scan,
220
+ "regex_filter": regex_filter,
221
+ "request_timeout_s": request_timeout_s,
222
+ "schema": schema,
223
+ "status": status,
224
+ "stream_chunk_bytes": stream_chunk_bytes,
225
+ "synchronizable": synchronizable,
226
+ "title": title,
227
+ "token": token,
228
+ "traceback": traceback,
229
+ "use_blob_urls": use_blob_urls,
230
+ "verify_tls": verify_tls,
231
+ "volume": volume,
232
+ },
233
+ request_options=request_options,
234
+ omit=OMIT,
235
+ )
236
+ try:
237
+ if 200 <= _response.status_code < 300:
238
+ return typing.cast(
239
+ DatabricksImportStorage,
240
+ construct_type(
241
+ type_=DatabricksImportStorage, # type: ignore
242
+ object_=_response.json(),
243
+ ),
244
+ )
245
+ _response_json = _response.json()
246
+ except JSONDecodeError:
247
+ raise ApiError(status_code=_response.status_code, body=_response.text)
248
+ raise ApiError(status_code=_response.status_code, body=_response_json)
249
+
250
+ def validate(
251
+ self,
252
+ *,
253
+ catalog: str,
254
+ host: str,
255
+ project: int,
256
+ schema: str,
257
+ volume: str,
258
+ description: typing.Optional[str] = OMIT,
259
+ last_sync: typing.Optional[dt.datetime] = OMIT,
260
+ last_sync_count: typing.Optional[int] = OMIT,
261
+ last_sync_job: typing.Optional[str] = OMIT,
262
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
263
+ prefix: typing.Optional[str] = OMIT,
264
+ presign: typing.Optional[bool] = OMIT,
265
+ presign_ttl: typing.Optional[int] = OMIT,
266
+ recursive_scan: typing.Optional[bool] = OMIT,
267
+ regex_filter: typing.Optional[str] = OMIT,
268
+ request_timeout_s: typing.Optional[int] = OMIT,
269
+ status: typing.Optional[StatusC5AEnum] = OMIT,
270
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
271
+ synchronizable: typing.Optional[bool] = OMIT,
272
+ title: typing.Optional[str] = OMIT,
273
+ token: typing.Optional[str] = OMIT,
274
+ traceback: typing.Optional[str] = OMIT,
275
+ use_blob_urls: typing.Optional[bool] = OMIT,
276
+ verify_tls: typing.Optional[bool] = OMIT,
277
+ request_options: typing.Optional[RequestOptions] = None,
278
+ ) -> None:
279
+ """
280
+ Validate a specific Databricks Files import storage connection.
281
+
282
+ Parameters
283
+ ----------
284
+ catalog : str
285
+ UC catalog name
286
+
287
+ host : str
288
+ Databricks workspace base URL (https://...)
289
+
290
+ project : int
291
+ A unique integer value identifying this project.
292
+
293
+ schema : str
294
+ UC schema name
295
+
296
+ volume : str
297
+ UC volume name
298
+
299
+ description : typing.Optional[str]
300
+ Cloud storage description
301
+
302
+ last_sync : typing.Optional[dt.datetime]
303
+ Last sync finished time
304
+
305
+ last_sync_count : typing.Optional[int]
306
+ Count of tasks synced last time
307
+
308
+ last_sync_job : typing.Optional[str]
309
+ Last sync job ID
310
+
311
+ meta : typing.Optional[typing.Optional[typing.Any]]
312
+
313
+ prefix : typing.Optional[str]
314
+ Path under the volume
315
+
316
+ presign : typing.Optional[bool]
317
+ Presign not supported; always proxied
318
+
319
+ presign_ttl : typing.Optional[int]
320
+ Unused for Databricks; kept for compatibility
321
+
322
+ recursive_scan : typing.Optional[bool]
323
+ Perform recursive scan
324
+
325
+ regex_filter : typing.Optional[str]
326
+ Regex for filtering objects
327
+
328
+ request_timeout_s : typing.Optional[int]
329
+
330
+ status : typing.Optional[StatusC5AEnum]
331
+
332
+ stream_chunk_bytes : typing.Optional[int]
333
+
334
+ synchronizable : typing.Optional[bool]
335
+
336
+ title : typing.Optional[str]
337
+ Cloud storage title
338
+
339
+ token : typing.Optional[str]
340
+
341
+ traceback : typing.Optional[str]
342
+ Traceback report for the last failed sync
343
+
344
+ use_blob_urls : typing.Optional[bool]
345
+ Generate blob URLs in tasks
346
+
347
+ verify_tls : typing.Optional[bool]
348
+ Verify TLS certificates
349
+
350
+ request_options : typing.Optional[RequestOptions]
351
+ Request-specific configuration.
352
+
353
+ Returns
354
+ -------
355
+ None
356
+
357
+ Examples
358
+ --------
359
+ from label_studio_sdk import LabelStudio
360
+
361
+ client = LabelStudio(
362
+ api_key="YOUR_API_KEY",
363
+ )
364
+ client.import_storage.databricks.validate(
365
+ catalog="catalog",
366
+ host="host",
367
+ project=1,
368
+ schema="schema",
369
+ volume="volume",
370
+ )
371
+ """
372
+ _response = self._client_wrapper.httpx_client.request(
373
+ "api/storages/databricks/validate",
374
+ method="POST",
375
+ json={
376
+ "catalog": catalog,
377
+ "description": description,
378
+ "host": host,
379
+ "last_sync": last_sync,
380
+ "last_sync_count": last_sync_count,
381
+ "last_sync_job": last_sync_job,
382
+ "meta": meta,
383
+ "prefix": prefix,
384
+ "presign": presign,
385
+ "presign_ttl": presign_ttl,
386
+ "project": project,
387
+ "recursive_scan": recursive_scan,
388
+ "regex_filter": regex_filter,
389
+ "request_timeout_s": request_timeout_s,
390
+ "schema": schema,
391
+ "status": status,
392
+ "stream_chunk_bytes": stream_chunk_bytes,
393
+ "synchronizable": synchronizable,
394
+ "title": title,
395
+ "token": token,
396
+ "traceback": traceback,
397
+ "use_blob_urls": use_blob_urls,
398
+ "verify_tls": verify_tls,
399
+ "volume": volume,
400
+ },
401
+ request_options=request_options,
402
+ omit=OMIT,
403
+ )
404
+ try:
405
+ if 200 <= _response.status_code < 300:
406
+ return
407
+ _response_json = _response.json()
408
+ except JSONDecodeError:
409
+ raise ApiError(status_code=_response.status_code, body=_response.text)
410
+ raise ApiError(status_code=_response.status_code, body=_response_json)
411
+
412
+ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksImportStorage:
413
+ """
414
+ Get a specific Databricks Files import storage connection.
415
+
416
+ Parameters
417
+ ----------
418
+ id : int
419
+
420
+ request_options : typing.Optional[RequestOptions]
421
+ Request-specific configuration.
422
+
423
+ Returns
424
+ -------
425
+ DatabricksImportStorage
426
+
427
+
428
+ Examples
429
+ --------
430
+ from label_studio_sdk import LabelStudio
431
+
432
+ client = LabelStudio(
433
+ api_key="YOUR_API_KEY",
434
+ )
435
+ client.import_storage.databricks.get(
436
+ id=1,
437
+ )
438
+ """
439
+ _response = self._client_wrapper.httpx_client.request(
440
+ f"api/storages/databricks/{jsonable_encoder(id)}",
441
+ method="GET",
442
+ request_options=request_options,
443
+ )
444
+ try:
445
+ if 200 <= _response.status_code < 300:
446
+ return typing.cast(
447
+ DatabricksImportStorage,
448
+ construct_type(
449
+ type_=DatabricksImportStorage, # type: ignore
450
+ object_=_response.json(),
451
+ ),
452
+ )
453
+ _response_json = _response.json()
454
+ except JSONDecodeError:
455
+ raise ApiError(status_code=_response.status_code, body=_response.text)
456
+ raise ApiError(status_code=_response.status_code, body=_response_json)
457
+
458
+ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
459
+ """
460
+ Delete a specific Databricks Files import storage connection.
461
+
462
+ Parameters
463
+ ----------
464
+ id : int
465
+
466
+ request_options : typing.Optional[RequestOptions]
467
+ Request-specific configuration.
468
+
469
+ Returns
470
+ -------
471
+ None
472
+
473
+ Examples
474
+ --------
475
+ from label_studio_sdk import LabelStudio
476
+
477
+ client = LabelStudio(
478
+ api_key="YOUR_API_KEY",
479
+ )
480
+ client.import_storage.databricks.delete(
481
+ id=1,
482
+ )
483
+ """
484
+ _response = self._client_wrapper.httpx_client.request(
485
+ f"api/storages/databricks/{jsonable_encoder(id)}",
486
+ method="DELETE",
487
+ request_options=request_options,
488
+ )
489
+ try:
490
+ if 200 <= _response.status_code < 300:
491
+ return
492
+ _response_json = _response.json()
493
+ except JSONDecodeError:
494
+ raise ApiError(status_code=_response.status_code, body=_response.text)
495
+ raise ApiError(status_code=_response.status_code, body=_response_json)
496
+
497
+ def update(
498
+ self,
499
+ id: int,
500
+ *,
501
+ catalog: typing.Optional[str] = OMIT,
502
+ description: typing.Optional[str] = OMIT,
503
+ host: typing.Optional[str] = OMIT,
504
+ last_sync: typing.Optional[dt.datetime] = OMIT,
505
+ last_sync_count: typing.Optional[int] = OMIT,
506
+ last_sync_job: typing.Optional[str] = OMIT,
507
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
508
+ prefix: typing.Optional[str] = OMIT,
509
+ presign: typing.Optional[bool] = OMIT,
510
+ presign_ttl: typing.Optional[int] = OMIT,
511
+ project: typing.Optional[int] = OMIT,
512
+ recursive_scan: typing.Optional[bool] = OMIT,
513
+ regex_filter: typing.Optional[str] = OMIT,
514
+ request_timeout_s: typing.Optional[int] = OMIT,
515
+ schema: typing.Optional[str] = OMIT,
516
+ status: typing.Optional[StatusC5AEnum] = OMIT,
517
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
518
+ synchronizable: typing.Optional[bool] = OMIT,
519
+ title: typing.Optional[str] = OMIT,
520
+ token: typing.Optional[str] = OMIT,
521
+ traceback: typing.Optional[str] = OMIT,
522
+ use_blob_urls: typing.Optional[bool] = OMIT,
523
+ verify_tls: typing.Optional[bool] = OMIT,
524
+ volume: typing.Optional[str] = OMIT,
525
+ request_options: typing.Optional[RequestOptions] = None,
526
+ ) -> DatabricksImportStorage:
527
+ """
528
+ Update a specific Databricks Files import storage connection.
529
+
530
+ Parameters
531
+ ----------
532
+ id : int
533
+
534
+ catalog : typing.Optional[str]
535
+ UC catalog name
536
+
537
+ description : typing.Optional[str]
538
+ Cloud storage description
539
+
540
+ host : typing.Optional[str]
541
+ Databricks workspace base URL (https://...)
542
+
543
+ last_sync : typing.Optional[dt.datetime]
544
+ Last sync finished time
545
+
546
+ last_sync_count : typing.Optional[int]
547
+ Count of tasks synced last time
548
+
549
+ last_sync_job : typing.Optional[str]
550
+ Last sync job ID
551
+
552
+ meta : typing.Optional[typing.Optional[typing.Any]]
553
+
554
+ prefix : typing.Optional[str]
555
+ Path under the volume
556
+
557
+ presign : typing.Optional[bool]
558
+ Presign not supported; always proxied
559
+
560
+ presign_ttl : typing.Optional[int]
561
+ Unused for Databricks; kept for compatibility
562
+
563
+ project : typing.Optional[int]
564
+ A unique integer value identifying this project.
565
+
566
+ recursive_scan : typing.Optional[bool]
567
+ Perform recursive scan
568
+
569
+ regex_filter : typing.Optional[str]
570
+ Regex for filtering objects
571
+
572
+ request_timeout_s : typing.Optional[int]
573
+
574
+ schema : typing.Optional[str]
575
+ UC schema name
576
+
577
+ status : typing.Optional[StatusC5AEnum]
578
+
579
+ stream_chunk_bytes : typing.Optional[int]
580
+
581
+ synchronizable : typing.Optional[bool]
582
+
583
+ title : typing.Optional[str]
584
+ Cloud storage title
585
+
586
+ token : typing.Optional[str]
587
+
588
+ traceback : typing.Optional[str]
589
+ Traceback report for the last failed sync
590
+
591
+ use_blob_urls : typing.Optional[bool]
592
+ Generate blob URLs in tasks
593
+
594
+ verify_tls : typing.Optional[bool]
595
+ Verify TLS certificates
596
+
597
+ volume : typing.Optional[str]
598
+ UC volume name
599
+
600
+ request_options : typing.Optional[RequestOptions]
601
+ Request-specific configuration.
602
+
603
+ Returns
604
+ -------
605
+ DatabricksImportStorage
606
+
607
+
608
+ Examples
609
+ --------
610
+ from label_studio_sdk import LabelStudio
611
+
612
+ client = LabelStudio(
613
+ api_key="YOUR_API_KEY",
614
+ )
615
+ client.import_storage.databricks.update(
616
+ id=1,
617
+ )
618
+ """
619
+ _response = self._client_wrapper.httpx_client.request(
620
+ f"api/storages/databricks/{jsonable_encoder(id)}",
621
+ method="PATCH",
622
+ json={
623
+ "catalog": catalog,
624
+ "description": description,
625
+ "host": host,
626
+ "last_sync": last_sync,
627
+ "last_sync_count": last_sync_count,
628
+ "last_sync_job": last_sync_job,
629
+ "meta": meta,
630
+ "prefix": prefix,
631
+ "presign": presign,
632
+ "presign_ttl": presign_ttl,
633
+ "project": project,
634
+ "recursive_scan": recursive_scan,
635
+ "regex_filter": regex_filter,
636
+ "request_timeout_s": request_timeout_s,
637
+ "schema": schema,
638
+ "status": status,
639
+ "stream_chunk_bytes": stream_chunk_bytes,
640
+ "synchronizable": synchronizable,
641
+ "title": title,
642
+ "token": token,
643
+ "traceback": traceback,
644
+ "use_blob_urls": use_blob_urls,
645
+ "verify_tls": verify_tls,
646
+ "volume": volume,
647
+ },
648
+ headers={
649
+ "content-type": "application/json",
650
+ },
651
+ request_options=request_options,
652
+ omit=OMIT,
653
+ )
654
+ try:
655
+ if 200 <= _response.status_code < 300:
656
+ return typing.cast(
657
+ DatabricksImportStorage,
658
+ construct_type(
659
+ type_=DatabricksImportStorage, # type: ignore
660
+ object_=_response.json(),
661
+ ),
662
+ )
663
+ _response_json = _response.json()
664
+ except JSONDecodeError:
665
+ raise ApiError(status_code=_response.status_code, body=_response.text)
666
+ raise ApiError(status_code=_response.status_code, body=_response_json)
667
+
668
+ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksImportStorage:
669
+ """
670
+ Sync tasks from a Databricks Files import storage.
671
+
672
+ Parameters
673
+ ----------
674
+ id : int
675
+
676
+ request_options : typing.Optional[RequestOptions]
677
+ Request-specific configuration.
678
+
679
+ Returns
680
+ -------
681
+ DatabricksImportStorage
682
+
683
+
684
+ Examples
685
+ --------
686
+ from label_studio_sdk import LabelStudio
687
+
688
+ client = LabelStudio(
689
+ api_key="YOUR_API_KEY",
690
+ )
691
+ client.import_storage.databricks.sync(
692
+ id=1,
693
+ )
694
+ """
695
+ _response = self._client_wrapper.httpx_client.request(
696
+ f"api/storages/databricks/{jsonable_encoder(id)}/sync",
697
+ method="POST",
698
+ request_options=request_options,
699
+ )
700
+ try:
701
+ if 200 <= _response.status_code < 300:
702
+ return typing.cast(
703
+ DatabricksImportStorage,
704
+ construct_type(
705
+ type_=DatabricksImportStorage, # type: ignore
706
+ object_=_response.json(),
707
+ ),
708
+ )
709
+ _response_json = _response.json()
710
+ except JSONDecodeError:
711
+ raise ApiError(status_code=_response.status_code, body=_response.text)
712
+ raise ApiError(status_code=_response.status_code, body=_response_json)
713
+
714
+
715
+ class AsyncDatabricksClient:
716
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
717
+ self._client_wrapper = client_wrapper
718
+
719
+ async def list(
720
+ self,
721
+ *,
722
+ ordering: typing.Optional[str] = None,
723
+ project: typing.Optional[int] = None,
724
+ request_options: typing.Optional[RequestOptions] = None,
725
+ ) -> typing.List[DatabricksImportStorage]:
726
+ """
727
+ Get list of all Databricks Files import storage connections.
728
+
729
+ Parameters
730
+ ----------
731
+ ordering : typing.Optional[str]
732
+ Which field to use when ordering the results.
733
+
734
+ project : typing.Optional[int]
735
+ Project ID
736
+
737
+ request_options : typing.Optional[RequestOptions]
738
+ Request-specific configuration.
739
+
740
+ Returns
741
+ -------
742
+ typing.List[DatabricksImportStorage]
743
+
744
+
745
+ Examples
746
+ --------
747
+ import asyncio
748
+
749
+ from label_studio_sdk import AsyncLabelStudio
750
+
751
+ client = AsyncLabelStudio(
752
+ api_key="YOUR_API_KEY",
753
+ )
754
+
755
+
756
+ async def main() -> None:
757
+ await client.import_storage.databricks.list()
758
+
759
+
760
+ asyncio.run(main())
761
+ """
762
+ _response = await self._client_wrapper.httpx_client.request(
763
+ "api/storages/databricks/",
764
+ method="GET",
765
+ params={
766
+ "ordering": ordering,
767
+ "project": project,
768
+ },
769
+ request_options=request_options,
770
+ )
771
+ try:
772
+ if 200 <= _response.status_code < 300:
773
+ return typing.cast(
774
+ typing.List[DatabricksImportStorage],
775
+ construct_type(
776
+ type_=typing.List[DatabricksImportStorage], # type: ignore
777
+ object_=_response.json(),
778
+ ),
779
+ )
780
+ _response_json = _response.json()
781
+ except JSONDecodeError:
782
+ raise ApiError(status_code=_response.status_code, body=_response.text)
783
+ raise ApiError(status_code=_response.status_code, body=_response_json)
784
+
785
+ async def create(
786
+ self,
787
+ *,
788
+ catalog: str,
789
+ host: str,
790
+ project: int,
791
+ schema: str,
792
+ volume: str,
793
+ description: typing.Optional[str] = OMIT,
794
+ last_sync: typing.Optional[dt.datetime] = OMIT,
795
+ last_sync_count: typing.Optional[int] = OMIT,
796
+ last_sync_job: typing.Optional[str] = OMIT,
797
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
798
+ prefix: typing.Optional[str] = OMIT,
799
+ presign: typing.Optional[bool] = OMIT,
800
+ presign_ttl: typing.Optional[int] = OMIT,
801
+ recursive_scan: typing.Optional[bool] = OMIT,
802
+ regex_filter: typing.Optional[str] = OMIT,
803
+ request_timeout_s: typing.Optional[int] = OMIT,
804
+ status: typing.Optional[StatusC5AEnum] = OMIT,
805
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
806
+ synchronizable: typing.Optional[bool] = OMIT,
807
+ title: typing.Optional[str] = OMIT,
808
+ token: typing.Optional[str] = OMIT,
809
+ traceback: typing.Optional[str] = OMIT,
810
+ use_blob_urls: typing.Optional[bool] = OMIT,
811
+ verify_tls: typing.Optional[bool] = OMIT,
812
+ request_options: typing.Optional[RequestOptions] = None,
813
+ ) -> DatabricksImportStorage:
814
+ """
815
+ Create a Databricks Files import storage connection.
816
+
817
+ Parameters
818
+ ----------
819
+ catalog : str
820
+ UC catalog name
821
+
822
+ host : str
823
+ Databricks workspace base URL (https://...)
824
+
825
+ project : int
826
+ A unique integer value identifying this project.
827
+
828
+ schema : str
829
+ UC schema name
830
+
831
+ volume : str
832
+ UC volume name
833
+
834
+ description : typing.Optional[str]
835
+ Cloud storage description
836
+
837
+ last_sync : typing.Optional[dt.datetime]
838
+ Last sync finished time
839
+
840
+ last_sync_count : typing.Optional[int]
841
+ Count of tasks synced last time
842
+
843
+ last_sync_job : typing.Optional[str]
844
+ Last sync job ID
845
+
846
+ meta : typing.Optional[typing.Optional[typing.Any]]
847
+
848
+ prefix : typing.Optional[str]
849
+ Path under the volume
850
+
851
+ presign : typing.Optional[bool]
852
+ Presign not supported; always proxied
853
+
854
+ presign_ttl : typing.Optional[int]
855
+ Unused for Databricks; kept for compatibility
856
+
857
+ recursive_scan : typing.Optional[bool]
858
+ Perform recursive scan
859
+
860
+ regex_filter : typing.Optional[str]
861
+ Regex for filtering objects
862
+
863
+ request_timeout_s : typing.Optional[int]
864
+
865
+ status : typing.Optional[StatusC5AEnum]
866
+
867
+ stream_chunk_bytes : typing.Optional[int]
868
+
869
+ synchronizable : typing.Optional[bool]
870
+
871
+ title : typing.Optional[str]
872
+ Cloud storage title
873
+
874
+ token : typing.Optional[str]
875
+
876
+ traceback : typing.Optional[str]
877
+ Traceback report for the last failed sync
878
+
879
+ use_blob_urls : typing.Optional[bool]
880
+ Generate blob URLs in tasks
881
+
882
+ verify_tls : typing.Optional[bool]
883
+ Verify TLS certificates
884
+
885
+ request_options : typing.Optional[RequestOptions]
886
+ Request-specific configuration.
887
+
888
+ Returns
889
+ -------
890
+ DatabricksImportStorage
891
+
892
+
893
+ Examples
894
+ --------
895
+ import asyncio
896
+
897
+ from label_studio_sdk import AsyncLabelStudio
898
+
899
+ client = AsyncLabelStudio(
900
+ api_key="YOUR_API_KEY",
901
+ )
902
+
903
+
904
+ async def main() -> None:
905
+ await client.import_storage.databricks.create(
906
+ catalog="catalog",
907
+ host="host",
908
+ project=1,
909
+ schema="schema",
910
+ volume="volume",
911
+ )
912
+
913
+
914
+ asyncio.run(main())
915
+ """
916
+ _response = await self._client_wrapper.httpx_client.request(
917
+ "api/storages/databricks/",
918
+ method="POST",
919
+ json={
920
+ "catalog": catalog,
921
+ "description": description,
922
+ "host": host,
923
+ "last_sync": last_sync,
924
+ "last_sync_count": last_sync_count,
925
+ "last_sync_job": last_sync_job,
926
+ "meta": meta,
927
+ "prefix": prefix,
928
+ "presign": presign,
929
+ "presign_ttl": presign_ttl,
930
+ "project": project,
931
+ "recursive_scan": recursive_scan,
932
+ "regex_filter": regex_filter,
933
+ "request_timeout_s": request_timeout_s,
934
+ "schema": schema,
935
+ "status": status,
936
+ "stream_chunk_bytes": stream_chunk_bytes,
937
+ "synchronizable": synchronizable,
938
+ "title": title,
939
+ "token": token,
940
+ "traceback": traceback,
941
+ "use_blob_urls": use_blob_urls,
942
+ "verify_tls": verify_tls,
943
+ "volume": volume,
944
+ },
945
+ request_options=request_options,
946
+ omit=OMIT,
947
+ )
948
+ try:
949
+ if 200 <= _response.status_code < 300:
950
+ return typing.cast(
951
+ DatabricksImportStorage,
952
+ construct_type(
953
+ type_=DatabricksImportStorage, # type: ignore
954
+ object_=_response.json(),
955
+ ),
956
+ )
957
+ _response_json = _response.json()
958
+ except JSONDecodeError:
959
+ raise ApiError(status_code=_response.status_code, body=_response.text)
960
+ raise ApiError(status_code=_response.status_code, body=_response_json)
961
+
962
+ async def validate(
963
+ self,
964
+ *,
965
+ catalog: str,
966
+ host: str,
967
+ project: int,
968
+ schema: str,
969
+ volume: str,
970
+ description: typing.Optional[str] = OMIT,
971
+ last_sync: typing.Optional[dt.datetime] = OMIT,
972
+ last_sync_count: typing.Optional[int] = OMIT,
973
+ last_sync_job: typing.Optional[str] = OMIT,
974
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
975
+ prefix: typing.Optional[str] = OMIT,
976
+ presign: typing.Optional[bool] = OMIT,
977
+ presign_ttl: typing.Optional[int] = OMIT,
978
+ recursive_scan: typing.Optional[bool] = OMIT,
979
+ regex_filter: typing.Optional[str] = OMIT,
980
+ request_timeout_s: typing.Optional[int] = OMIT,
981
+ status: typing.Optional[StatusC5AEnum] = OMIT,
982
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
983
+ synchronizable: typing.Optional[bool] = OMIT,
984
+ title: typing.Optional[str] = OMIT,
985
+ token: typing.Optional[str] = OMIT,
986
+ traceback: typing.Optional[str] = OMIT,
987
+ use_blob_urls: typing.Optional[bool] = OMIT,
988
+ verify_tls: typing.Optional[bool] = OMIT,
989
+ request_options: typing.Optional[RequestOptions] = None,
990
+ ) -> None:
991
+ """
992
+ Validate a specific Databricks Files import storage connection.
993
+
994
+ Parameters
995
+ ----------
996
+ catalog : str
997
+ UC catalog name
998
+
999
+ host : str
1000
+ Databricks workspace base URL (https://...)
1001
+
1002
+ project : int
1003
+ A unique integer value identifying this project.
1004
+
1005
+ schema : str
1006
+ UC schema name
1007
+
1008
+ volume : str
1009
+ UC volume name
1010
+
1011
+ description : typing.Optional[str]
1012
+ Cloud storage description
1013
+
1014
+ last_sync : typing.Optional[dt.datetime]
1015
+ Last sync finished time
1016
+
1017
+ last_sync_count : typing.Optional[int]
1018
+ Count of tasks synced last time
1019
+
1020
+ last_sync_job : typing.Optional[str]
1021
+ Last sync job ID
1022
+
1023
+ meta : typing.Optional[typing.Optional[typing.Any]]
1024
+
1025
+ prefix : typing.Optional[str]
1026
+ Path under the volume
1027
+
1028
+ presign : typing.Optional[bool]
1029
+ Presign not supported; always proxied
1030
+
1031
+ presign_ttl : typing.Optional[int]
1032
+ Unused for Databricks; kept for compatibility
1033
+
1034
+ recursive_scan : typing.Optional[bool]
1035
+ Perform recursive scan
1036
+
1037
+ regex_filter : typing.Optional[str]
1038
+ Regex for filtering objects
1039
+
1040
+ request_timeout_s : typing.Optional[int]
1041
+
1042
+ status : typing.Optional[StatusC5AEnum]
1043
+
1044
+ stream_chunk_bytes : typing.Optional[int]
1045
+
1046
+ synchronizable : typing.Optional[bool]
1047
+
1048
+ title : typing.Optional[str]
1049
+ Cloud storage title
1050
+
1051
+ token : typing.Optional[str]
1052
+
1053
+ traceback : typing.Optional[str]
1054
+ Traceback report for the last failed sync
1055
+
1056
+ use_blob_urls : typing.Optional[bool]
1057
+ Generate blob URLs in tasks
1058
+
1059
+ verify_tls : typing.Optional[bool]
1060
+ Verify TLS certificates
1061
+
1062
+ request_options : typing.Optional[RequestOptions]
1063
+ Request-specific configuration.
1064
+
1065
+ Returns
1066
+ -------
1067
+ None
1068
+
1069
+ Examples
1070
+ --------
1071
+ import asyncio
1072
+
1073
+ from label_studio_sdk import AsyncLabelStudio
1074
+
1075
+ client = AsyncLabelStudio(
1076
+ api_key="YOUR_API_KEY",
1077
+ )
1078
+
1079
+
1080
+ async def main() -> None:
1081
+ await client.import_storage.databricks.validate(
1082
+ catalog="catalog",
1083
+ host="host",
1084
+ project=1,
1085
+ schema="schema",
1086
+ volume="volume",
1087
+ )
1088
+
1089
+
1090
+ asyncio.run(main())
1091
+ """
1092
+ _response = await self._client_wrapper.httpx_client.request(
1093
+ "api/storages/databricks/validate",
1094
+ method="POST",
1095
+ json={
1096
+ "catalog": catalog,
1097
+ "description": description,
1098
+ "host": host,
1099
+ "last_sync": last_sync,
1100
+ "last_sync_count": last_sync_count,
1101
+ "last_sync_job": last_sync_job,
1102
+ "meta": meta,
1103
+ "prefix": prefix,
1104
+ "presign": presign,
1105
+ "presign_ttl": presign_ttl,
1106
+ "project": project,
1107
+ "recursive_scan": recursive_scan,
1108
+ "regex_filter": regex_filter,
1109
+ "request_timeout_s": request_timeout_s,
1110
+ "schema": schema,
1111
+ "status": status,
1112
+ "stream_chunk_bytes": stream_chunk_bytes,
1113
+ "synchronizable": synchronizable,
1114
+ "title": title,
1115
+ "token": token,
1116
+ "traceback": traceback,
1117
+ "use_blob_urls": use_blob_urls,
1118
+ "verify_tls": verify_tls,
1119
+ "volume": volume,
1120
+ },
1121
+ request_options=request_options,
1122
+ omit=OMIT,
1123
+ )
1124
+ try:
1125
+ if 200 <= _response.status_code < 300:
1126
+ return
1127
+ _response_json = _response.json()
1128
+ except JSONDecodeError:
1129
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1130
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1131
+
1132
+ async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> DatabricksImportStorage:
1133
+ """
1134
+ Get a specific Databricks Files import storage connection.
1135
+
1136
+ Parameters
1137
+ ----------
1138
+ id : int
1139
+
1140
+ request_options : typing.Optional[RequestOptions]
1141
+ Request-specific configuration.
1142
+
1143
+ Returns
1144
+ -------
1145
+ DatabricksImportStorage
1146
+
1147
+
1148
+ Examples
1149
+ --------
1150
+ import asyncio
1151
+
1152
+ from label_studio_sdk import AsyncLabelStudio
1153
+
1154
+ client = AsyncLabelStudio(
1155
+ api_key="YOUR_API_KEY",
1156
+ )
1157
+
1158
+
1159
+ async def main() -> None:
1160
+ await client.import_storage.databricks.get(
1161
+ id=1,
1162
+ )
1163
+
1164
+
1165
+ asyncio.run(main())
1166
+ """
1167
+ _response = await self._client_wrapper.httpx_client.request(
1168
+ f"api/storages/databricks/{jsonable_encoder(id)}",
1169
+ method="GET",
1170
+ request_options=request_options,
1171
+ )
1172
+ try:
1173
+ if 200 <= _response.status_code < 300:
1174
+ return typing.cast(
1175
+ DatabricksImportStorage,
1176
+ construct_type(
1177
+ type_=DatabricksImportStorage, # type: ignore
1178
+ object_=_response.json(),
1179
+ ),
1180
+ )
1181
+ _response_json = _response.json()
1182
+ except JSONDecodeError:
1183
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1184
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1185
+
1186
+ async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
1187
+ """
1188
+ Delete a specific Databricks Files import storage connection.
1189
+
1190
+ Parameters
1191
+ ----------
1192
+ id : int
1193
+
1194
+ request_options : typing.Optional[RequestOptions]
1195
+ Request-specific configuration.
1196
+
1197
+ Returns
1198
+ -------
1199
+ None
1200
+
1201
+ Examples
1202
+ --------
1203
+ import asyncio
1204
+
1205
+ from label_studio_sdk import AsyncLabelStudio
1206
+
1207
+ client = AsyncLabelStudio(
1208
+ api_key="YOUR_API_KEY",
1209
+ )
1210
+
1211
+
1212
+ async def main() -> None:
1213
+ await client.import_storage.databricks.delete(
1214
+ id=1,
1215
+ )
1216
+
1217
+
1218
+ asyncio.run(main())
1219
+ """
1220
+ _response = await self._client_wrapper.httpx_client.request(
1221
+ f"api/storages/databricks/{jsonable_encoder(id)}",
1222
+ method="DELETE",
1223
+ request_options=request_options,
1224
+ )
1225
+ try:
1226
+ if 200 <= _response.status_code < 300:
1227
+ return
1228
+ _response_json = _response.json()
1229
+ except JSONDecodeError:
1230
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1231
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1232
+
1233
+ async def update(
1234
+ self,
1235
+ id: int,
1236
+ *,
1237
+ catalog: typing.Optional[str] = OMIT,
1238
+ description: typing.Optional[str] = OMIT,
1239
+ host: typing.Optional[str] = OMIT,
1240
+ last_sync: typing.Optional[dt.datetime] = OMIT,
1241
+ last_sync_count: typing.Optional[int] = OMIT,
1242
+ last_sync_job: typing.Optional[str] = OMIT,
1243
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
1244
+ prefix: typing.Optional[str] = OMIT,
1245
+ presign: typing.Optional[bool] = OMIT,
1246
+ presign_ttl: typing.Optional[int] = OMIT,
1247
+ project: typing.Optional[int] = OMIT,
1248
+ recursive_scan: typing.Optional[bool] = OMIT,
1249
+ regex_filter: typing.Optional[str] = OMIT,
1250
+ request_timeout_s: typing.Optional[int] = OMIT,
1251
+ schema: typing.Optional[str] = OMIT,
1252
+ status: typing.Optional[StatusC5AEnum] = OMIT,
1253
+ stream_chunk_bytes: typing.Optional[int] = OMIT,
1254
+ synchronizable: typing.Optional[bool] = OMIT,
1255
+ title: typing.Optional[str] = OMIT,
1256
+ token: typing.Optional[str] = OMIT,
1257
+ traceback: typing.Optional[str] = OMIT,
1258
+ use_blob_urls: typing.Optional[bool] = OMIT,
1259
+ verify_tls: typing.Optional[bool] = OMIT,
1260
+ volume: typing.Optional[str] = OMIT,
1261
+ request_options: typing.Optional[RequestOptions] = None,
1262
+ ) -> DatabricksImportStorage:
1263
+ """
1264
+ Update a specific Databricks Files import storage connection.
1265
+
1266
+ Parameters
1267
+ ----------
1268
+ id : int
1269
+
1270
+ catalog : typing.Optional[str]
1271
+ UC catalog name
1272
+
1273
+ description : typing.Optional[str]
1274
+ Cloud storage description
1275
+
1276
+ host : typing.Optional[str]
1277
+ Databricks workspace base URL (https://...)
1278
+
1279
+ last_sync : typing.Optional[dt.datetime]
1280
+ Last sync finished time
1281
+
1282
+ last_sync_count : typing.Optional[int]
1283
+ Count of tasks synced last time
1284
+
1285
+ last_sync_job : typing.Optional[str]
1286
+ Last sync job ID
1287
+
1288
+ meta : typing.Optional[typing.Optional[typing.Any]]
1289
+
1290
+ prefix : typing.Optional[str]
1291
+ Path under the volume
1292
+
1293
+ presign : typing.Optional[bool]
1294
+ Presign not supported; always proxied
1295
+
1296
+ presign_ttl : typing.Optional[int]
1297
+ Unused for Databricks; kept for compatibility
1298
+
1299
+ project : typing.Optional[int]
1300
+ A unique integer value identifying this project.
1301
+
1302
+ recursive_scan : typing.Optional[bool]
1303
+ Perform recursive scan
1304
+
1305
+ regex_filter : typing.Optional[str]
1306
+ Regex for filtering objects
1307
+
1308
+ request_timeout_s : typing.Optional[int]
1309
+
1310
+ schema : typing.Optional[str]
1311
+ UC schema name
1312
+
1313
+ status : typing.Optional[StatusC5AEnum]
1314
+
1315
+ stream_chunk_bytes : typing.Optional[int]
1316
+
1317
+ synchronizable : typing.Optional[bool]
1318
+
1319
+ title : typing.Optional[str]
1320
+ Cloud storage title
1321
+
1322
+ token : typing.Optional[str]
1323
+
1324
+ traceback : typing.Optional[str]
1325
+ Traceback report for the last failed sync
1326
+
1327
+ use_blob_urls : typing.Optional[bool]
1328
+ Generate blob URLs in tasks
1329
+
1330
+ verify_tls : typing.Optional[bool]
1331
+ Verify TLS certificates
1332
+
1333
+ volume : typing.Optional[str]
1334
+ UC volume name
1335
+
1336
+ request_options : typing.Optional[RequestOptions]
1337
+ Request-specific configuration.
1338
+
1339
+ Returns
1340
+ -------
1341
+ DatabricksImportStorage
1342
+
1343
+
1344
+ Examples
1345
+ --------
1346
+ import asyncio
1347
+
1348
+ from label_studio_sdk import AsyncLabelStudio
1349
+
1350
+ client = AsyncLabelStudio(
1351
+ api_key="YOUR_API_KEY",
1352
+ )
1353
+
1354
+
1355
+ async def main() -> None:
1356
+ await client.import_storage.databricks.update(
1357
+ id=1,
1358
+ )
1359
+
1360
+
1361
+ asyncio.run(main())
1362
+ """
1363
+ _response = await self._client_wrapper.httpx_client.request(
1364
+ f"api/storages/databricks/{jsonable_encoder(id)}",
1365
+ method="PATCH",
1366
+ json={
1367
+ "catalog": catalog,
1368
+ "description": description,
1369
+ "host": host,
1370
+ "last_sync": last_sync,
1371
+ "last_sync_count": last_sync_count,
1372
+ "last_sync_job": last_sync_job,
1373
+ "meta": meta,
1374
+ "prefix": prefix,
1375
+ "presign": presign,
1376
+ "presign_ttl": presign_ttl,
1377
+ "project": project,
1378
+ "recursive_scan": recursive_scan,
1379
+ "regex_filter": regex_filter,
1380
+ "request_timeout_s": request_timeout_s,
1381
+ "schema": schema,
1382
+ "status": status,
1383
+ "stream_chunk_bytes": stream_chunk_bytes,
1384
+ "synchronizable": synchronizable,
1385
+ "title": title,
1386
+ "token": token,
1387
+ "traceback": traceback,
1388
+ "use_blob_urls": use_blob_urls,
1389
+ "verify_tls": verify_tls,
1390
+ "volume": volume,
1391
+ },
1392
+ headers={
1393
+ "content-type": "application/json",
1394
+ },
1395
+ request_options=request_options,
1396
+ omit=OMIT,
1397
+ )
1398
+ try:
1399
+ if 200 <= _response.status_code < 300:
1400
+ return typing.cast(
1401
+ DatabricksImportStorage,
1402
+ construct_type(
1403
+ type_=DatabricksImportStorage, # type: ignore
1404
+ object_=_response.json(),
1405
+ ),
1406
+ )
1407
+ _response_json = _response.json()
1408
+ except JSONDecodeError:
1409
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1410
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1411
+
1412
+ async def sync(
1413
+ self, id: int, *, request_options: typing.Optional[RequestOptions] = None
1414
+ ) -> DatabricksImportStorage:
1415
+ """
1416
+ Sync tasks from a Databricks Files import storage.
1417
+
1418
+ Parameters
1419
+ ----------
1420
+ id : int
1421
+
1422
+ request_options : typing.Optional[RequestOptions]
1423
+ Request-specific configuration.
1424
+
1425
+ Returns
1426
+ -------
1427
+ DatabricksImportStorage
1428
+
1429
+
1430
+ Examples
1431
+ --------
1432
+ import asyncio
1433
+
1434
+ from label_studio_sdk import AsyncLabelStudio
1435
+
1436
+ client = AsyncLabelStudio(
1437
+ api_key="YOUR_API_KEY",
1438
+ )
1439
+
1440
+
1441
+ async def main() -> None:
1442
+ await client.import_storage.databricks.sync(
1443
+ id=1,
1444
+ )
1445
+
1446
+
1447
+ asyncio.run(main())
1448
+ """
1449
+ _response = await self._client_wrapper.httpx_client.request(
1450
+ f"api/storages/databricks/{jsonable_encoder(id)}/sync",
1451
+ method="POST",
1452
+ request_options=request_options,
1453
+ )
1454
+ try:
1455
+ if 200 <= _response.status_code < 300:
1456
+ return typing.cast(
1457
+ DatabricksImportStorage,
1458
+ construct_type(
1459
+ type_=DatabricksImportStorage, # type: ignore
1460
+ object_=_response.json(),
1461
+ ),
1462
+ )
1463
+ _response_json = _response.json()
1464
+ except JSONDecodeError:
1465
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1466
+ raise ApiError(status_code=_response.status_code, body=_response_json)
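
Error handling in the generated methods is uniform: any non-2xx response is raised as ApiError, carrying the decoded JSON body (or the raw response text if the body cannot be parsed). A short sketch of catching it around validate, assuming ApiError exposes the status_code and body it is constructed with:

    from label_studio_sdk import LabelStudio
    from label_studio_sdk.core.api_error import ApiError

    client = LabelStudio(api_key="YOUR_API_KEY")
    try:
        client.import_storage.databricks.validate(
            catalog="main",
            schema="default",
            volume="labeling_data",
            host="https://my-workspace.cloud.databricks.com",  # hypothetical workspace URL
            project=1,
        )
    except ApiError as err:
        # Raised by the generated client for any non-2xx status
        print(err.status_code, err.body)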