label-studio-sdk 1.0.2__py3-none-any.whl → 1.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of label-studio-sdk might be problematic. Click here for more details.
- label_studio_sdk/__init__.py +20 -1
- label_studio_sdk/actions/client.py +8 -8
- label_studio_sdk/annotations/client.py +24 -24
- label_studio_sdk/base_client.py +3 -0
- label_studio_sdk/core/client_wrapper.py +1 -1
- label_studio_sdk/core/http_client.py +36 -8
- label_studio_sdk/core/request_options.py +2 -2
- label_studio_sdk/export_storage/__init__.py +2 -1
- label_studio_sdk/export_storage/azure/client.py +28 -28
- label_studio_sdk/export_storage/client.py +7 -4
- label_studio_sdk/export_storage/gcs/client.py +28 -28
- label_studio_sdk/export_storage/local/client.py +28 -28
- label_studio_sdk/export_storage/redis/client.py +28 -28
- label_studio_sdk/export_storage/s3/client.py +28 -28
- label_studio_sdk/export_storage/s3s/__init__.py +2 -0
- label_studio_sdk/export_storage/s3s/client.py +836 -0
- label_studio_sdk/files/client.py +24 -24
- label_studio_sdk/import_storage/__init__.py +2 -1
- label_studio_sdk/import_storage/azure/client.py +28 -28
- label_studio_sdk/import_storage/client.py +7 -4
- label_studio_sdk/import_storage/gcs/client.py +28 -28
- label_studio_sdk/import_storage/local/client.py +28 -28
- label_studio_sdk/import_storage/redis/client.py +28 -28
- label_studio_sdk/import_storage/s3/client.py +28 -28
- label_studio_sdk/import_storage/s3s/__init__.py +2 -0
- label_studio_sdk/import_storage/s3s/client.py +1054 -0
- label_studio_sdk/label_interface/base.py +2 -2
- label_studio_sdk/label_interface/control_tags.py +32 -18
- label_studio_sdk/label_interface/create.py +241 -0
- label_studio_sdk/label_interface/interface.py +68 -0
- label_studio_sdk/label_interface/object_tags.py +26 -10
- label_studio_sdk/label_interface/objects.py +5 -5
- label_studio_sdk/ml/client.py +36 -36
- label_studio_sdk/predictions/client.py +24 -24
- label_studio_sdk/projects/__init__.py +8 -2
- label_studio_sdk/projects/client.py +232 -69
- label_studio_sdk/projects/client_ext.py +16 -1
- label_studio_sdk/projects/exports/client.py +38 -38
- label_studio_sdk/projects/types/__init__.py +2 -1
- label_studio_sdk/projects/types/projects_update_response.py +96 -0
- label_studio_sdk/tasks/client.py +70 -60
- label_studio_sdk/tasks/client_ext.py +4 -0
- label_studio_sdk/types/__init__.py +16 -0
- label_studio_sdk/types/base_task.py +4 -2
- label_studio_sdk/types/base_task_file_upload.py +5 -0
- label_studio_sdk/types/base_task_updated_by.py +5 -0
- label_studio_sdk/types/data_manager_task_serializer.py +3 -2
- label_studio_sdk/types/data_manager_task_serializer_annotators_item.py +5 -0
- label_studio_sdk/types/s3s_export_storage.py +80 -0
- label_studio_sdk/types/s3s_import_storage.py +129 -0
- label_studio_sdk/types/s3s_import_storage_status.py +7 -0
- label_studio_sdk/types/task.py +3 -2
- label_studio_sdk/types/task_annotators_item.py +5 -0
- label_studio_sdk/types/workspace.py +77 -0
- label_studio_sdk/users/client.py +32 -32
- label_studio_sdk/views/client.py +24 -24
- label_studio_sdk/webhooks/client.py +24 -24
- label_studio_sdk/workspaces/__init__.py +6 -0
- label_studio_sdk/workspaces/client.py +569 -0
- label_studio_sdk/workspaces/members/__init__.py +5 -0
- label_studio_sdk/workspaces/members/client.py +297 -0
- label_studio_sdk/workspaces/members/types/__init__.py +6 -0
- label_studio_sdk/workspaces/members/types/members_create_response.py +32 -0
- label_studio_sdk/workspaces/members/types/members_list_response_item.py +32 -0
- {label_studio_sdk-1.0.2.dist-info → label_studio_sdk-1.0.4.dist-info}/METADATA +11 -12
- {label_studio_sdk-1.0.2.dist-info → label_studio_sdk-1.0.4.dist-info}/RECORD +67 -46
- {label_studio_sdk-1.0.2.dist-info → label_studio_sdk-1.0.4.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,1054 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
import typing
|
|
4
|
+
from json.decoder import JSONDecodeError
|
|
5
|
+
|
|
6
|
+
from ...core.api_error import ApiError
|
|
7
|
+
from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
|
|
8
|
+
from ...core.jsonable_encoder import jsonable_encoder
|
|
9
|
+
from ...core.pydantic_utilities import pydantic_v1
|
|
10
|
+
from ...core.request_options import RequestOptions
|
|
11
|
+
from ...types.s3s_import_storage import S3SImportStorage
|
|
12
|
+
|
|
13
|
+
# Sentinel default for optional request parameters. Request methods pass
# omit=OMIT to httpx_client.request(...), so fields still equal to this
# sentinel are presumably stripped from the JSON body before sending
# (distinct from explicitly passing None) — NOTE(review): confirm against
# the client wrapper's request implementation.
OMIT = typing.cast(typing.Any, ...)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class S3SClient:
    """Synchronous client for the S3 (IAM role access) import storage endpoints (``api/storages/s3s``)."""

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def list(
        self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.List[S3SImportStorage]:
        """
        You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.

        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).

        For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).

        Parameters
        ----------
        project : typing.Optional[int]
            Project ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.List[S3SImportStorage]


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.list()
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/s3s", method="GET", params={"project": project}, request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json())  # type: ignore
            # Non-2xx: fall through and raise with the decoded JSON error body.
            _response_json = _response.json()
        except JSONDecodeError:
            # Body was not valid JSON; report the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def create(
        self,
        *,
        regex_filter: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        presign: typing.Optional[bool] = OMIT,
        presign_ttl: typing.Optional[int] = OMIT,
        recursive_scan: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        project: typing.Optional[int] = OMIT,
        bucket: typing.Optional[str] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        external_id: typing.Optional[str] = OMIT,
        role_arn: typing.Optional[str] = OMIT,
        region_name: typing.Optional[str] = OMIT,
        s3endpoint: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> S3SImportStorage:
        """
        Create a new source storage connection to an S3 bucket.

        For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.

        <Info>Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.</Info>

        <Tip>After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).</Tip>

        Parameters
        ----------
        regex_filter : typing.Optional[str]
            Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.

        use_blob_urls : typing.Optional[bool]
            Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.

        presign : typing.Optional[bool]
            Presign URLs for download

        presign_ttl : typing.Optional[int]
            Presign TTL in minutes

        recursive_scan : typing.Optional[bool]
            Scan recursively

        title : typing.Optional[str]
            Storage title

        description : typing.Optional[str]
            Storage description

        project : typing.Optional[int]
            Project ID

        bucket : typing.Optional[str]
            S3 bucket name

        prefix : typing.Optional[str]
            S3 bucket prefix

        external_id : typing.Optional[str]
            AWS External ID

        role_arn : typing.Optional[str]
            AWS Role ARN

        region_name : typing.Optional[str]
            AWS Region

        s3endpoint : typing.Optional[str]
            S3 Endpoint

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        S3SImportStorage


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.create()
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/s3s",
            method="POST",
            json={
                "regex_filter": regex_filter,
                "use_blob_urls": use_blob_urls,
                "presign": presign,
                "presign_ttl": presign_ttl,
                "recursive_scan": recursive_scan,
                "title": title,
                "description": description,
                "project": project,
                "bucket": bucket,
                "prefix": prefix,
                "external_id": external_id,
                "role_arn": role_arn,
                "region_name": region_name,
                # Note: the SDK parameter is ``s3endpoint`` but the API field is ``s3_endpoint``.
                "s3_endpoint": s3endpoint,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage:
        """
        Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).

        Parameters
        ----------
        id : int
            Import storage ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        S3SImportStorage


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.get(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
        """
        Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).

        Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project.

        If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API.

        Parameters
        ----------
        id : int
            Import storage ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        None

        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.delete(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                # 204-style success: the API returns no body on delete.
                return
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def update(
        self,
        id: int,
        *,
        regex_filter: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        presign: typing.Optional[bool] = OMIT,
        presign_ttl: typing.Optional[int] = OMIT,
        recursive_scan: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        project: typing.Optional[int] = OMIT,
        bucket: typing.Optional[str] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        external_id: typing.Optional[str] = OMIT,
        role_arn: typing.Optional[str] = OMIT,
        region_name: typing.Optional[str] = OMIT,
        s3endpoint: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> S3SImportStorage:
        """
        Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).

        For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).

        Parameters
        ----------
        id : int
            Import storage ID

        regex_filter : typing.Optional[str]
            Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.

        use_blob_urls : typing.Optional[bool]
            Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.

        presign : typing.Optional[bool]
            Presign URLs for download

        presign_ttl : typing.Optional[int]
            Presign TTL in minutes

        recursive_scan : typing.Optional[bool]
            Scan recursively

        title : typing.Optional[str]
            Storage title

        description : typing.Optional[str]
            Storage description

        project : typing.Optional[int]
            Project ID

        bucket : typing.Optional[str]
            S3 bucket name

        prefix : typing.Optional[str]
            S3 bucket prefix

        external_id : typing.Optional[str]
            AWS External ID

        role_arn : typing.Optional[str]
            AWS Role ARN

        region_name : typing.Optional[str]
            AWS Region

        s3endpoint : typing.Optional[str]
            S3 Endpoint

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        S3SImportStorage


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.update(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/s3s/{jsonable_encoder(id)}",
            method="PATCH",
            json={
                "regex_filter": regex_filter,
                "use_blob_urls": use_blob_urls,
                "presign": presign,
                "presign_ttl": presign_ttl,
                "recursive_scan": recursive_scan,
                "title": title,
                "description": description,
                "project": project,
                "bucket": bucket,
                "prefix": prefix,
                "external_id": external_id,
                "role_arn": role_arn,
                "region_name": region_name,
                # Note: the SDK parameter is ``s3endpoint`` but the API field is ``s3_endpoint``.
                "s3_endpoint": s3endpoint,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def validate(
        self,
        *,
        regex_filter: typing.Optional[str] = OMIT,
        use_blob_urls: typing.Optional[bool] = OMIT,
        presign: typing.Optional[bool] = OMIT,
        presign_ttl: typing.Optional[int] = OMIT,
        recursive_scan: typing.Optional[bool] = OMIT,
        title: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        project: typing.Optional[int] = OMIT,
        bucket: typing.Optional[str] = OMIT,
        prefix: typing.Optional[str] = OMIT,
        external_id: typing.Optional[str] = OMIT,
        role_arn: typing.Optional[str] = OMIT,
        region_name: typing.Optional[str] = OMIT,
        s3endpoint: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> None:
        """
        Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data.

        Parameters
        ----------
        regex_filter : typing.Optional[str]
            Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.

        use_blob_urls : typing.Optional[bool]
            Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.

        presign : typing.Optional[bool]
            Presign URLs for download

        presign_ttl : typing.Optional[int]
            Presign TTL in minutes

        recursive_scan : typing.Optional[bool]
            Scan recursively

        title : typing.Optional[str]
            Storage title

        description : typing.Optional[str]
            Storage description

        project : typing.Optional[int]
            Project ID

        bucket : typing.Optional[str]
            S3 bucket name

        prefix : typing.Optional[str]
            S3 bucket prefix

        external_id : typing.Optional[str]
            AWS External ID

        role_arn : typing.Optional[str]
            AWS Role ARN

        region_name : typing.Optional[str]
            AWS Region

        s3endpoint : typing.Optional[str]
            S3 Endpoint

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        None

        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.validate()
        """
        _response = self._client_wrapper.httpx_client.request(
            "api/storages/s3s/validate",
            method="POST",
            json={
                "regex_filter": regex_filter,
                "use_blob_urls": use_blob_urls,
                "presign": presign,
                "presign_ttl": presign_ttl,
                "recursive_scan": recursive_scan,
                "title": title,
                "description": description,
                "project": project,
                "bucket": bucket,
                "prefix": prefix,
                "external_id": external_id,
                "role_arn": role_arn,
                "region_name": region_name,
                # Note: the SDK parameter is ``s3endpoint`` but the API field is ``s3_endpoint``.
                "s3_endpoint": s3endpoint,
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Validation succeeded: the endpoint returns no useful body.
                return
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage:
        """
        Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list).

        Parameters
        ----------
        id : int
            Storage ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        S3SImportStorage


        Examples
        --------
        from label_studio_sdk.client import LabelStudio

        client = LabelStudio(
            api_key="YOUR_API_KEY",
        )
        client.import_storage.s3s.sync(
            id=1,
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options
        )
        try:
            if 200 <= _response.status_code < 300:
                return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json())  # type: ignore
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
|
536
|
+
|
|
537
|
+
class AsyncS3SClient:
|
|
538
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
539
|
+
self._client_wrapper = client_wrapper
|
|
540
|
+
|
|
541
|
+
async def list(
|
|
542
|
+
self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None
|
|
543
|
+
) -> typing.List[S3SImportStorage]:
|
|
544
|
+
"""
|
|
545
|
+
You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project.
|
|
546
|
+
|
|
547
|
+
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
|
|
548
|
+
|
|
549
|
+
For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
|
|
550
|
+
|
|
551
|
+
Parameters
|
|
552
|
+
----------
|
|
553
|
+
project : typing.Optional[int]
|
|
554
|
+
Project ID
|
|
555
|
+
|
|
556
|
+
request_options : typing.Optional[RequestOptions]
|
|
557
|
+
Request-specific configuration.
|
|
558
|
+
|
|
559
|
+
Returns
|
|
560
|
+
-------
|
|
561
|
+
typing.List[S3SImportStorage]
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
Examples
|
|
565
|
+
--------
|
|
566
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
567
|
+
|
|
568
|
+
client = AsyncLabelStudio(
|
|
569
|
+
api_key="YOUR_API_KEY",
|
|
570
|
+
)
|
|
571
|
+
await client.import_storage.s3s.list()
|
|
572
|
+
"""
|
|
573
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
574
|
+
"api/storages/s3s", method="GET", params={"project": project}, request_options=request_options
|
|
575
|
+
)
|
|
576
|
+
try:
|
|
577
|
+
if 200 <= _response.status_code < 300:
|
|
578
|
+
return pydantic_v1.parse_obj_as(typing.List[S3SImportStorage], _response.json()) # type: ignore
|
|
579
|
+
_response_json = _response.json()
|
|
580
|
+
except JSONDecodeError:
|
|
581
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
582
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
583
|
+
|
|
584
|
+
async def create(
|
|
585
|
+
self,
|
|
586
|
+
*,
|
|
587
|
+
regex_filter: typing.Optional[str] = OMIT,
|
|
588
|
+
use_blob_urls: typing.Optional[bool] = OMIT,
|
|
589
|
+
presign: typing.Optional[bool] = OMIT,
|
|
590
|
+
presign_ttl: typing.Optional[int] = OMIT,
|
|
591
|
+
recursive_scan: typing.Optional[bool] = OMIT,
|
|
592
|
+
title: typing.Optional[str] = OMIT,
|
|
593
|
+
description: typing.Optional[str] = OMIT,
|
|
594
|
+
project: typing.Optional[int] = OMIT,
|
|
595
|
+
bucket: typing.Optional[str] = OMIT,
|
|
596
|
+
prefix: typing.Optional[str] = OMIT,
|
|
597
|
+
external_id: typing.Optional[str] = OMIT,
|
|
598
|
+
role_arn: typing.Optional[str] = OMIT,
|
|
599
|
+
region_name: typing.Optional[str] = OMIT,
|
|
600
|
+
s3endpoint: typing.Optional[str] = OMIT,
|
|
601
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
602
|
+
) -> S3SImportStorage:
|
|
603
|
+
"""
|
|
604
|
+
Create a new source storage connection to a S3 bucket.
|
|
605
|
+
|
|
606
|
+
For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation.
|
|
607
|
+
|
|
608
|
+
<Info>Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link.</Info>
|
|
609
|
+
|
|
610
|
+
<Tip>After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync).</Tip>
|
|
611
|
+
|
|
612
|
+
Parameters
|
|
613
|
+
----------
|
|
614
|
+
regex_filter : typing.Optional[str]
|
|
615
|
+
Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
|
|
616
|
+
|
|
617
|
+
use_blob_urls : typing.Optional[bool]
|
|
618
|
+
Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
|
|
619
|
+
|
|
620
|
+
presign : typing.Optional[bool]
|
|
621
|
+
Presign URLs for download
|
|
622
|
+
|
|
623
|
+
presign_ttl : typing.Optional[int]
|
|
624
|
+
Presign TTL in minutes
|
|
625
|
+
|
|
626
|
+
recursive_scan : typing.Optional[bool]
|
|
627
|
+
Scan recursively
|
|
628
|
+
|
|
629
|
+
title : typing.Optional[str]
|
|
630
|
+
Storage title
|
|
631
|
+
|
|
632
|
+
description : typing.Optional[str]
|
|
633
|
+
Storage description
|
|
634
|
+
|
|
635
|
+
project : typing.Optional[int]
|
|
636
|
+
Project ID
|
|
637
|
+
|
|
638
|
+
bucket : typing.Optional[str]
|
|
639
|
+
S3 bucket name
|
|
640
|
+
|
|
641
|
+
prefix : typing.Optional[str]
|
|
642
|
+
S3 bucket prefix
|
|
643
|
+
|
|
644
|
+
external_id : typing.Optional[str]
|
|
645
|
+
AWS External ID
|
|
646
|
+
|
|
647
|
+
role_arn : typing.Optional[str]
|
|
648
|
+
AWS Role ARN
|
|
649
|
+
|
|
650
|
+
region_name : typing.Optional[str]
|
|
651
|
+
AWS Region
|
|
652
|
+
|
|
653
|
+
s3endpoint : typing.Optional[str]
|
|
654
|
+
S3 Endpoint
|
|
655
|
+
|
|
656
|
+
request_options : typing.Optional[RequestOptions]
|
|
657
|
+
Request-specific configuration.
|
|
658
|
+
|
|
659
|
+
Returns
|
|
660
|
+
-------
|
|
661
|
+
S3SImportStorage
|
|
662
|
+
|
|
663
|
+
|
|
664
|
+
Examples
|
|
665
|
+
--------
|
|
666
|
+
from label_studio_sdk.client import AsyncLabelStudio
|
|
667
|
+
|
|
668
|
+
client = AsyncLabelStudio(
|
|
669
|
+
api_key="YOUR_API_KEY",
|
|
670
|
+
)
|
|
671
|
+
await client.import_storage.s3s.create()
|
|
672
|
+
"""
|
|
673
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
674
|
+
"api/storages/s3s",
|
|
675
|
+
method="POST",
|
|
676
|
+
json={
|
|
677
|
+
"regex_filter": regex_filter,
|
|
678
|
+
"use_blob_urls": use_blob_urls,
|
|
679
|
+
"presign": presign,
|
|
680
|
+
"presign_ttl": presign_ttl,
|
|
681
|
+
"recursive_scan": recursive_scan,
|
|
682
|
+
"title": title,
|
|
683
|
+
"description": description,
|
|
684
|
+
"project": project,
|
|
685
|
+
"bucket": bucket,
|
|
686
|
+
"prefix": prefix,
|
|
687
|
+
"external_id": external_id,
|
|
688
|
+
"role_arn": role_arn,
|
|
689
|
+
"region_name": region_name,
|
|
690
|
+
"s3_endpoint": s3endpoint,
|
|
691
|
+
},
|
|
692
|
+
request_options=request_options,
|
|
693
|
+
omit=OMIT,
|
|
694
|
+
)
|
|
695
|
+
try:
|
|
696
|
+
if 200 <= _response.status_code < 300:
|
|
697
|
+
return pydantic_v1.parse_obj_as(S3SImportStorage, _response.json()) # type: ignore
|
|
698
|
+
_response_json = _response.json()
|
|
699
|
+
except JSONDecodeError:
|
|
700
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
701
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
702
|
+
|
|
703
|
+
async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage:
    """
    Retrieve one S3 (IAM-role) import storage connection by its ID. The ID can be
    discovered with [List import storages](list).

    Parameters
    ----------
    id : int
        Import storage ID

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    S3SImportStorage

    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.import_storage.s3s.get(
        id=1,
    )
    """
    response = await self._client_wrapper.httpx_client.request(
        f"api/storages/s3s/{jsonable_encoder(id)}", method="GET", request_options=request_options
    )
    try:
        if 200 <= response.status_code < 300:
            # Deserialize the successful payload into the typed storage model.
            return pydantic_v1.parse_obj_as(S3SImportStorage, response.json())  # type: ignore
        error_body = response.json()
    except JSONDecodeError:
        # Non-JSON body: surface the raw text so the caller still sees the server output.
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=error_body)
|
|
741
|
+
|
|
742
|
+
async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
    """
    Remove an S3 import storage connection by its ID (see [List import storages](list)).

    Deleting a source storage connection does not affect tasks with synced data in
    Label Studio: syncing only imports new or updated tasks and does not track file
    deletions, so tasks created from this storage remain in the project. To remove
    them, delete them in the Label Studio UI or via the
    [Delete tasks](../../tasks/delete-all-tasks) API.

    Parameters
    ----------
    id : int
        Import storage ID

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    None

    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.import_storage.s3s.delete(
        id=1,
    )
    """
    response = await self._client_wrapper.httpx_client.request(
        f"api/storages/s3s/{jsonable_encoder(id)}", method="DELETE", request_options=request_options
    )
    try:
        if 200 <= response.status_code < 300:
            # Success responses carry no meaningful body for a delete.
            return
        error_body = response.json()
    except JSONDecodeError:
        # Non-JSON body: report the raw text instead of failing on decode.
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=error_body)
|
|
783
|
+
|
|
784
|
+
async def update(
    self,
    id: int,
    *,
    regex_filter: typing.Optional[str] = OMIT,
    use_blob_urls: typing.Optional[bool] = OMIT,
    presign: typing.Optional[bool] = OMIT,
    presign_ttl: typing.Optional[int] = OMIT,
    recursive_scan: typing.Optional[bool] = OMIT,
    title: typing.Optional[str] = OMIT,
    description: typing.Optional[str] = OMIT,
    project: typing.Optional[int] = OMIT,
    bucket: typing.Optional[str] = OMIT,
    prefix: typing.Optional[str] = OMIT,
    external_id: typing.Optional[str] = OMIT,
    role_arn: typing.Optional[str] = OMIT,
    region_name: typing.Optional[str] = OMIT,
    s3endpoint: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> S3SImportStorage:
    """
    Update an existing S3 import storage connection by its ID
    (see [List import storages](list)).

    For more information about working with external storage, see
    [Sync data from external storage](https://labelstud.io/guide/storage).

    Parameters
    ----------
    id : int
        Import storage ID

    regex_filter : typing.Optional[str]
        Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.

    use_blob_urls : typing.Optional[bool]
        Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.

    presign : typing.Optional[bool]
        Presign URLs for download

    presign_ttl : typing.Optional[int]
        Presign TTL in minutes

    recursive_scan : typing.Optional[bool]
        Scan recursively

    title : typing.Optional[str]
        Storage title

    description : typing.Optional[str]
        Storage description

    project : typing.Optional[int]
        Project ID

    bucket : typing.Optional[str]
        S3 bucket name

    prefix : typing.Optional[str]
        S3 bucket prefix

    external_id : typing.Optional[str]
        AWS External ID

    role_arn : typing.Optional[str]
        AWS Role ARN

    region_name : typing.Optional[str]
        AWS Region

    s3endpoint : typing.Optional[str]
        S3 Endpoint

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    S3SImportStorage

    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.import_storage.s3s.update(
        id=1,
    )
    """
    # Fields left as OMIT are stripped from the payload by the request layer,
    # so a PATCH only touches the settings the caller actually supplied.
    payload = {
        "regex_filter": regex_filter,
        "use_blob_urls": use_blob_urls,
        "presign": presign,
        "presign_ttl": presign_ttl,
        "recursive_scan": recursive_scan,
        "title": title,
        "description": description,
        "project": project,
        "bucket": bucket,
        "prefix": prefix,
        "external_id": external_id,
        "role_arn": role_arn,
        "region_name": region_name,
        "s3_endpoint": s3endpoint,
    }
    response = await self._client_wrapper.httpx_client.request(
        f"api/storages/s3s/{jsonable_encoder(id)}",
        method="PATCH",
        json=payload,
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= response.status_code < 300:
            return pydantic_v1.parse_obj_as(S3SImportStorage, response.json())  # type: ignore
        error_body = response.json()
    except JSONDecodeError:
        # Non-JSON body: surface the raw text rather than a decode failure.
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=error_body)
|
|
904
|
+
|
|
905
|
+
async def validate(
    self,
    *,
    regex_filter: typing.Optional[str] = OMIT,
    use_blob_urls: typing.Optional[bool] = OMIT,
    presign: typing.Optional[bool] = OMIT,
    presign_ttl: typing.Optional[int] = OMIT,
    recursive_scan: typing.Optional[bool] = OMIT,
    title: typing.Optional[str] = OMIT,
    description: typing.Optional[str] = OMIT,
    project: typing.Optional[int] = OMIT,
    bucket: typing.Optional[str] = OMIT,
    prefix: typing.Optional[str] = OMIT,
    external_id: typing.Optional[str] = OMIT,
    role_arn: typing.Optional[str] = OMIT,
    region_name: typing.Optional[str] = OMIT,
    s3endpoint: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> None:
    """
    Check that an S3 import storage configuration is correct and operational
    before attempting to import data with it.

    Parameters
    ----------
    regex_filter : typing.Optional[str]
        Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.

    use_blob_urls : typing.Optional[bool]
        Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.

    presign : typing.Optional[bool]
        Presign URLs for download

    presign_ttl : typing.Optional[int]
        Presign TTL in minutes

    recursive_scan : typing.Optional[bool]
        Scan recursively

    title : typing.Optional[str]
        Storage title

    description : typing.Optional[str]
        Storage description

    project : typing.Optional[int]
        Project ID

    bucket : typing.Optional[str]
        S3 bucket name

    prefix : typing.Optional[str]
        S3 bucket prefix

    external_id : typing.Optional[str]
        AWS External ID

    role_arn : typing.Optional[str]
        AWS Role ARN

    region_name : typing.Optional[str]
        AWS Region

    s3endpoint : typing.Optional[str]
        S3 Endpoint

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    None

    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.import_storage.s3s.validate()
    """
    # OMIT-valued fields are dropped by the request layer; only settings the
    # caller provided are submitted for validation.
    payload = {
        "regex_filter": regex_filter,
        "use_blob_urls": use_blob_urls,
        "presign": presign,
        "presign_ttl": presign_ttl,
        "recursive_scan": recursive_scan,
        "title": title,
        "description": description,
        "project": project,
        "bucket": bucket,
        "prefix": prefix,
        "external_id": external_id,
        "role_arn": role_arn,
        "region_name": region_name,
        "s3_endpoint": s3endpoint,
    }
    response = await self._client_wrapper.httpx_client.request(
        "api/storages/s3s/validate",
        method="POST",
        json=payload,
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= response.status_code < 300:
            # A 2xx means the configuration validated; nothing to return.
            return
        error_body = response.json()
    except JSONDecodeError:
        # Non-JSON body: surface the raw text rather than a decode failure.
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=error_body)
|
|
1016
|
+
|
|
1017
|
+
async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage:
    """
    Trigger a task sync from an S3 import storage connection by its ID
    (see [List import storages](list)).

    Parameters
    ----------
    id : int
        Storage ID

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    S3SImportStorage

    Examples
    --------
    from label_studio_sdk.client import AsyncLabelStudio

    client = AsyncLabelStudio(
        api_key="YOUR_API_KEY",
    )
    await client.import_storage.s3s.sync(
        id=1,
    )
    """
    response = await self._client_wrapper.httpx_client.request(
        f"api/storages/s3s/{jsonable_encoder(id)}/sync", method="POST", request_options=request_options
    )
    try:
        if 200 <= response.status_code < 300:
            # The server echoes the storage record (with updated sync state).
            return pydantic_v1.parse_obj_as(S3SImportStorage, response.json())  # type: ignore
        error_body = response.json()
    except JSONDecodeError:
        # Non-JSON body: surface the raw text rather than a decode failure.
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=error_body)