dtlpy 1.114.17__py3-none-any.whl → 1.116.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
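For anyone following along locally, the upgrade itself is the usual wheel install (for example `pip install dtlpy==1.116.6`). The snippet below is a minimal sanity check, assuming the package exposes its version string as `dtlpy.__version__` (the version module is bumped in this diff, but its contents are not shown here):

import dtlpy as dl

# Assumed attribute name; prints the installed SDK version, expected to be 1.116.6 after upgrading.
print(dl.__version__)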
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -311
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -296
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -442
- dtlpy/entities/dataset.py +1299 -1285
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -223
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +798 -645
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +959 -953
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -499
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +963 -958
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1257 -1086
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -158
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -435
- dtlpy/repositories/datasets.py +1504 -1291
- dtlpy/repositories/downloader.py +976 -903
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -470
- dtlpy/repositories/executions.py +815 -817
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +255 -238
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -909
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -988
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +419 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -651
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1785 -1782
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
- dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -183
- dtlpy-1.116.6.dist-info/RECORD +239 -0
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.114.17.data/scripts/dlp.bat +0 -2
- dtlpy-1.114.17.dist-info/RECORD +0 -240
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/dataset.py
CHANGED
@@ -1,1285 +1,1299 @@
Removed: lines 1-1285 of the previous version (this side of the hunk was collapsed in the source and is not reproduced here).
1
|
+
from collections import namedtuple
|
|
2
|
+
import traceback
|
|
3
|
+
import logging
|
|
4
|
+
from enum import Enum
|
|
5
|
+
|
|
6
|
+
import attr
|
|
7
|
+
import os
|
|
8
|
+
|
|
9
|
+
from .. import repositories, entities, services, exceptions
|
|
10
|
+
from ..services.api_client import ApiClient
|
|
11
|
+
from .annotation import ViewAnnotationOptions, AnnotationType, ExportVersion
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(name='dtlpy')
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class IndexDriver(str, Enum):
|
|
17
|
+
V1 = "v1"
|
|
18
|
+
V2 = "v2"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class ExportType(str, Enum):
|
|
22
|
+
JSON = "json"
|
|
23
|
+
ZIP = "zip"
|
|
24
|
+
|
|
25
|
+
class OutputExportType(str, Enum):
|
|
26
|
+
JSON = "json"
|
|
27
|
+
ZIP = "zip"
|
|
28
|
+
FOLDERS = "folders"
|
|
29
|
+
|
|
30
|
+
class ExpirationOptions:
|
|
31
|
+
"""
|
|
32
|
+
ExpirationOptions object
|
|
33
|
+
"""
|
|
34
|
+
|
|
35
|
+
def __init__(self, item_max_days: int = None):
|
|
36
|
+
"""
|
|
37
|
+
:param item_max_days: int. items in dataset will be auto delete after this number id days
|
|
38
|
+
"""
|
|
39
|
+
self.item_max_days = item_max_days
|
|
40
|
+
|
|
41
|
+
def to_json(self):
|
|
42
|
+
_json = dict()
|
|
43
|
+
if self.item_max_days is not None:
|
|
44
|
+
_json["itemMaxDays"] = self.item_max_days
|
|
45
|
+
return _json
|
|
46
|
+
|
|
47
|
+
@classmethod
|
|
48
|
+
def from_json(cls, _json: dict):
|
|
49
|
+
item_max_days = _json.get('itemMaxDays', None)
|
|
50
|
+
if item_max_days:
|
|
51
|
+
return cls(item_max_days=item_max_days)
|
|
52
|
+
return None
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@attr.s
|
|
56
|
+
class Dataset(entities.BaseEntity):
|
|
57
|
+
"""
|
|
58
|
+
Dataset object
|
|
59
|
+
"""
|
|
60
|
+
# dataset information
|
|
61
|
+
id = attr.ib()
|
|
62
|
+
url = attr.ib()
|
|
63
|
+
name = attr.ib()
|
|
64
|
+
annotated = attr.ib(repr=False)
|
|
65
|
+
creator = attr.ib()
|
|
66
|
+
projects = attr.ib(repr=False)
|
|
67
|
+
items_count = attr.ib()
|
|
68
|
+
metadata = attr.ib(repr=False)
|
|
69
|
+
directoryTree = attr.ib(repr=False)
|
|
70
|
+
expiration_options = attr.ib()
|
|
71
|
+
index_driver = attr.ib()
|
|
72
|
+
enable_sync_with_cloned = attr.ib(repr=False)
|
|
73
|
+
|
|
74
|
+
# name change when to_json
|
|
75
|
+
created_at = attr.ib()
|
|
76
|
+
updated_at = attr.ib()
|
|
77
|
+
updated_by = attr.ib()
|
|
78
|
+
items_url = attr.ib(repr=False)
|
|
79
|
+
readable_type = attr.ib(repr=False)
|
|
80
|
+
access_level = attr.ib(repr=False)
|
|
81
|
+
driver = attr.ib(repr=False)
|
|
82
|
+
src_dataset = attr.ib(repr=False)
|
|
83
|
+
_readonly = attr.ib(repr=False)
|
|
84
|
+
annotations_count = attr.ib()
|
|
85
|
+
|
|
86
|
+
# api
|
|
87
|
+
_client_api = attr.ib(type=ApiClient, repr=False)
|
|
88
|
+
|
|
89
|
+
# entities
|
|
90
|
+
_project = attr.ib(default=None, repr=False)
|
|
91
|
+
|
|
92
|
+
# repositories
|
|
93
|
+
_datasets = attr.ib(repr=False, default=None)
|
|
94
|
+
_repositories = attr.ib(repr=False)
|
|
95
|
+
|
|
96
|
+
# defaults
|
|
97
|
+
_ontology_ids = attr.ib(default=None, repr=False)
|
|
98
|
+
_labels = attr.ib(default=None, repr=False)
|
|
99
|
+
_directory_tree = attr.ib(default=None, repr=False)
|
|
100
|
+
_recipe = attr.ib(default=None, repr=False)
|
|
101
|
+
_ontology = attr.ib(default=None, repr=False)
|
|
102
|
+
|
|
103
|
+
@property
|
|
104
|
+
def itemsCount(self):
|
|
105
|
+
return self.items_count
|
|
106
|
+
|
|
107
|
+
@staticmethod
|
|
108
|
+
def _protected_from_json(project: entities.Project,
|
|
109
|
+
_json: dict,
|
|
110
|
+
client_api: ApiClient,
|
|
111
|
+
datasets=None,
|
|
112
|
+
is_fetched=True):
|
|
113
|
+
"""
|
|
114
|
+
Same as from_json but with try-except to catch if error
|
|
115
|
+
|
|
116
|
+
:param project: dataset's project
|
|
117
|
+
:param _json: _json response from host
|
|
118
|
+
:param client_api: ApiClient entity
|
|
119
|
+
:param datasets: Datasets repository
|
|
120
|
+
:param is_fetched: is Entity fetched from Platform
|
|
121
|
+
:return: Dataset object
|
|
122
|
+
"""
|
|
123
|
+
try:
|
|
124
|
+
dataset = Dataset.from_json(project=project,
|
|
125
|
+
_json=_json,
|
|
126
|
+
client_api=client_api,
|
|
127
|
+
datasets=datasets,
|
|
128
|
+
is_fetched=is_fetched)
|
|
129
|
+
status = True
|
|
130
|
+
except Exception:
|
|
131
|
+
dataset = traceback.format_exc()
|
|
132
|
+
status = False
|
|
133
|
+
return status, dataset
|
|
134
|
+
|
|
135
|
+
@classmethod
|
|
136
|
+
def from_json(cls,
|
|
137
|
+
project: entities.Project,
|
|
138
|
+
_json: dict,
|
|
139
|
+
client_api: ApiClient,
|
|
140
|
+
datasets=None,
|
|
141
|
+
is_fetched=True):
|
|
142
|
+
"""
|
|
143
|
+
Build a Dataset entity object from a json
|
|
144
|
+
|
|
145
|
+
:param project: dataset's project
|
|
146
|
+
:param dict _json: _json response from host
|
|
147
|
+
:param client_api: ApiClient entity
|
|
148
|
+
:param datasets: Datasets repository
|
|
149
|
+
:param bool is_fetched: is Entity fetched from Platform
|
|
150
|
+
:return: Dataset object
|
|
151
|
+
:rtype: dtlpy.entities.dataset.Dataset
|
|
152
|
+
"""
|
|
153
|
+
projects = _json.get('projects', None)
|
|
154
|
+
if project is not None and projects is not None:
|
|
155
|
+
if project.id not in projects:
|
|
156
|
+
logger.warning('Dataset has been fetched from a project that is not in it projects list')
|
|
157
|
+
project = None
|
|
158
|
+
|
|
159
|
+
expiration_options = _json.get('expirationOptions', None)
|
|
160
|
+
if expiration_options:
|
|
161
|
+
expiration_options = ExpirationOptions.from_json(expiration_options)
|
|
162
|
+
inst = cls(metadata=_json.get('metadata', None),
|
|
163
|
+
directoryTree=_json.get('directoryTree', None),
|
|
164
|
+
readable_type=_json.get('readableType', None),
|
|
165
|
+
access_level=_json.get('accessLevel', None),
|
|
166
|
+
created_at=_json.get('createdAt', None),
|
|
167
|
+
updated_at=_json.get('updatedAt', None),
|
|
168
|
+
updated_by=_json.get('updatedBy', None),
|
|
169
|
+
annotations_count=_json.get("annotationsCount", None),
|
|
170
|
+
items_count=_json.get('itemsCount', None),
|
|
171
|
+
annotated=_json.get('annotated', None),
|
|
172
|
+
readonly=_json.get('readonly', None),
|
|
173
|
+
projects=projects,
|
|
174
|
+
creator=_json.get('creator', None),
|
|
175
|
+
items_url=_json.get('items', None),
|
|
176
|
+
driver=_json.get('driver', None),
|
|
177
|
+
name=_json.get('name', None),
|
|
178
|
+
url=_json.get('url', None),
|
|
179
|
+
id=_json.get('id', None),
|
|
180
|
+
datasets=datasets,
|
|
181
|
+
client_api=client_api,
|
|
182
|
+
project=project,
|
|
183
|
+
expiration_options=expiration_options,
|
|
184
|
+
index_driver=_json.get('indexDriver', None),
|
|
185
|
+
enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
|
|
186
|
+
src_dataset=_json.get('srcDataset', None))
|
|
187
|
+
inst.is_fetched = is_fetched
|
|
188
|
+
return inst
|
|
189
|
+
|
|
190
|
+
def to_json(self):
|
|
191
|
+
"""
|
|
192
|
+
Returns platform _json format of object
|
|
193
|
+
|
|
194
|
+
:return: platform json format of object
|
|
195
|
+
:rtype: dict
|
|
196
|
+
"""
|
|
197
|
+
_json = attr.asdict(self, filter=attr.filters.exclude(attr.fields(Dataset)._client_api,
|
|
198
|
+
attr.fields(Dataset)._project,
|
|
199
|
+
attr.fields(Dataset)._readonly,
|
|
200
|
+
attr.fields(Dataset)._datasets,
|
|
201
|
+
attr.fields(Dataset)._repositories,
|
|
202
|
+
attr.fields(Dataset)._ontology_ids,
|
|
203
|
+
attr.fields(Dataset)._labels,
|
|
204
|
+
attr.fields(Dataset)._recipe,
|
|
205
|
+
attr.fields(Dataset)._ontology,
|
|
206
|
+
attr.fields(Dataset)._directory_tree,
|
|
207
|
+
attr.fields(Dataset).access_level,
|
|
208
|
+
attr.fields(Dataset).readable_type,
|
|
209
|
+
attr.fields(Dataset).created_at,
|
|
210
|
+
attr.fields(Dataset).updated_at,
|
|
211
|
+
attr.fields(Dataset).updated_by,
|
|
212
|
+
attr.fields(Dataset).annotations_count,
|
|
213
|
+
attr.fields(Dataset).items_url,
|
|
214
|
+
attr.fields(Dataset).expiration_options,
|
|
215
|
+
attr.fields(Dataset).items_count,
|
|
216
|
+
attr.fields(Dataset).index_driver,
|
|
217
|
+
attr.fields(Dataset).enable_sync_with_cloned,
|
|
218
|
+
attr.fields(Dataset).src_dataset,
|
|
219
|
+
))
|
|
220
|
+
_json.update({'items': self.items_url})
|
|
221
|
+
_json['readableType'] = self.readable_type
|
|
222
|
+
_json['createdAt'] = self.created_at
|
|
223
|
+
_json['updatedAt'] = self.updated_at
|
|
224
|
+
_json['updatedBy'] = self.updated_by
|
|
225
|
+
_json['annotationsCount'] = self.annotations_count
|
|
226
|
+
_json['accessLevel'] = self.access_level
|
|
227
|
+
_json['readonly'] = self._readonly
|
|
228
|
+
_json['itemsCount'] = self.items_count
|
|
229
|
+
_json['indexDriver'] = self.index_driver
|
|
230
|
+
if self.expiration_options and self.expiration_options.to_json():
|
|
231
|
+
_json['expirationOptions'] = self.expiration_options.to_json()
|
|
232
|
+
if self.enable_sync_with_cloned is not None:
|
|
233
|
+
_json['enableSyncWithCloned'] = self.enable_sync_with_cloned
|
|
234
|
+
if self.src_dataset is not None:
|
|
235
|
+
_json['srcDataset'] = self.src_dataset
|
|
236
|
+
return _json
|
|
237
|
+
|
|
238
|
+
@property
|
|
239
|
+
def labels(self):
|
|
240
|
+
if self._labels is None:
|
|
241
|
+
self._labels = self._get_ontology().labels
|
|
242
|
+
return self._labels
|
|
243
|
+
|
|
244
|
+
@property
|
|
245
|
+
def readonly(self):
|
|
246
|
+
return self._readonly
|
|
247
|
+
|
|
248
|
+
@property
|
|
249
|
+
def platform_url(self):
|
|
250
|
+
return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
|
|
251
|
+
|
|
252
|
+
@readonly.setter
|
|
253
|
+
def readonly(self, state):
|
|
254
|
+
import warnings
|
|
255
|
+
warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
|
|
256
|
+
|
|
257
|
+
@property
|
|
258
|
+
def labels_flat_dict(self):
|
|
259
|
+
return self._get_ontology().labels_flat_dict
|
|
260
|
+
|
|
261
|
+
@property
|
|
262
|
+
def instance_map(self) -> dict:
|
|
263
|
+
return self._get_ontology().instance_map
|
|
264
|
+
|
|
265
|
+
@instance_map.setter
|
|
266
|
+
def instance_map(self, value: dict):
|
|
267
|
+
"""
|
|
268
|
+
instance mapping for creating instance mask
|
|
269
|
+
|
|
270
|
+
:param value: dictionary {label: map_id}
|
|
271
|
+
"""
|
|
272
|
+
if not isinstance(value, dict):
|
|
273
|
+
raise ValueError('input must be a dictionary of {label_name: instance_id}')
|
|
274
|
+
self._get_ontology().instance_map = value
|
|
275
|
+
|
|
276
|
+
@property
|
|
277
|
+
def ontology_ids(self):
|
|
278
|
+
if self._ontology_ids is None:
|
|
279
|
+
self._ontology_ids = list()
|
|
280
|
+
if self.metadata is not None and 'system' in self.metadata and 'recipes' in self.metadata['system']:
|
|
281
|
+
recipe_ids = self.get_recipe_ids()
|
|
282
|
+
for rec_id in recipe_ids:
|
|
283
|
+
recipe = self.recipes.get(recipe_id=rec_id)
|
|
284
|
+
self._ontology_ids += recipe.ontology_ids
|
|
285
|
+
return self._ontology_ids
|
|
286
|
+
|
|
287
|
+
@_repositories.default
|
|
288
|
+
def set_repositories(self):
|
|
289
|
+
reps = namedtuple('repositories',
|
|
290
|
+
field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
|
|
291
|
+
'ontologies', 'features', 'settings', 'schema', 'collections'])
|
|
292
|
+
if self._project is None:
|
|
293
|
+
datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
|
|
294
|
+
else:
|
|
295
|
+
datasets = self._project.datasets
|
|
296
|
+
|
|
297
|
+
return reps(
|
|
298
|
+
items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
|
|
299
|
+
recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
|
|
300
|
+
assignments=repositories.Assignments(project=self._project, client_api=self._client_api, dataset=self),
|
|
301
|
+
tasks=repositories.Tasks(client_api=self._client_api, project=self._project, dataset=self),
|
|
302
|
+
annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
|
|
303
|
+
datasets=datasets,
|
|
304
|
+
ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
|
|
305
|
+
features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
|
|
306
|
+
settings=repositories.Settings(client_api=self._client_api, dataset=self),
|
|
307
|
+
schema=repositories.Schema(client_api=self._client_api, dataset=self),
|
|
308
|
+
collections=repositories.Collections(client_api=self._client_api, dataset=self)
|
|
309
|
+
)
|
|
310
|
+
|
|
311
|
+
@property
|
|
312
|
+
def settings(self):
|
|
313
|
+
assert isinstance(self._repositories.settings, repositories.Settings)
|
|
314
|
+
return self._repositories.settings
|
|
315
|
+
|
|
316
|
+
@property
|
|
317
|
+
def items(self):
|
|
318
|
+
assert isinstance(self._repositories.items, repositories.Items)
|
|
319
|
+
return self._repositories.items
|
|
320
|
+
|
|
321
|
+
@property
|
|
322
|
+
def ontologies(self):
|
|
323
|
+
assert isinstance(self._repositories.ontologies, repositories.Ontologies)
|
|
324
|
+
return self._repositories.ontologies
|
|
325
|
+
|
|
326
|
+
@property
|
|
327
|
+
def recipes(self):
|
|
328
|
+
assert isinstance(self._repositories.recipes, repositories.Recipes)
|
|
329
|
+
return self._repositories.recipes
|
|
330
|
+
|
|
331
|
+
@property
|
|
332
|
+
def datasets(self):
|
|
333
|
+
assert isinstance(self._repositories.datasets, repositories.Datasets)
|
|
334
|
+
return self._repositories.datasets
|
|
335
|
+
|
|
336
|
+
@property
|
|
337
|
+
def assignments(self):
|
|
338
|
+
assert isinstance(self._repositories.assignments, repositories.Assignments)
|
|
339
|
+
return self._repositories.assignments
|
|
340
|
+
|
|
341
|
+
@property
|
|
342
|
+
def tasks(self):
|
|
343
|
+
assert isinstance(self._repositories.tasks, repositories.Tasks)
|
|
344
|
+
return self._repositories.tasks
|
|
345
|
+
|
|
346
|
+
@property
|
|
347
|
+
def annotations(self):
|
|
348
|
+
assert isinstance(self._repositories.annotations, repositories.Annotations)
|
|
349
|
+
return self._repositories.annotations
|
|
350
|
+
|
|
351
|
+
@property
|
|
352
|
+
def features(self):
|
|
353
|
+
assert isinstance(self._repositories.features, repositories.Features)
|
|
354
|
+
return self._repositories.features
|
|
355
|
+
|
|
356
|
+
@property
|
|
357
|
+
def collections(self):
|
|
358
|
+
assert isinstance(self._repositories.collections, repositories.Collections)
|
|
359
|
+
return self._repositories.collections
|
|
360
|
+
|
|
361
|
+
@property
|
|
362
|
+
def schema(self):
|
|
363
|
+
assert isinstance(self._repositories.schema, repositories.Schema)
|
|
364
|
+
return self._repositories.schema
|
|
365
|
+
|
|
366
|
+
@property
|
|
367
|
+
def project(self):
|
|
368
|
+
if self._project is None:
|
|
369
|
+
# get from cache
|
|
370
|
+
project = self._client_api.state_io.get('project')
|
|
371
|
+
if project is not None:
|
|
372
|
+
# build entity from json
|
|
373
|
+
p = entities.Project.from_json(_json=project, client_api=self._client_api)
|
|
374
|
+
# check if dataset belongs to project
|
|
375
|
+
if p.id in self.projects:
|
|
376
|
+
self._project = p
|
|
377
|
+
if self._project is None:
|
|
378
|
+
self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.projects[0],
|
|
379
|
+
fetch=None)
|
|
380
|
+
assert isinstance(self._project, entities.Project)
|
|
381
|
+
return self._project
|
|
382
|
+
|
|
383
|
+
@project.setter
|
|
384
|
+
def project(self, project):
|
|
385
|
+
if not isinstance(project, entities.Project):
|
|
386
|
+
raise ValueError('Must input a valid Project entity')
|
|
387
|
+
self._project = project
|
|
388
|
+
|
|
389
|
+
@property
|
|
390
|
+
def directory_tree(self):
|
|
391
|
+
if self._directory_tree is None:
|
|
392
|
+
self._directory_tree = self.project.datasets.directory_tree(dataset_id=self.id)
|
|
393
|
+
assert isinstance(self._directory_tree, entities.DirectoryTree)
|
|
394
|
+
return self._directory_tree
|
|
395
|
+
|
|
396
|
+
def __copy__(self):
|
|
397
|
+
return Dataset.from_json(_json=self.to_json(),
|
|
398
|
+
project=self._project,
|
|
399
|
+
client_api=self._client_api,
|
|
400
|
+
is_fetched=self.is_fetched,
|
|
401
|
+
datasets=self.datasets)
|
|
402
|
+
|
|
403
|
+
def __get_local_path__(self):
|
|
404
|
+
if self._project is not None:
|
|
405
|
+
local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
|
|
406
|
+
'projects',
|
|
407
|
+
self.project.name,
|
|
408
|
+
'datasets',
|
|
409
|
+
self.name)
|
|
410
|
+
else:
|
|
411
|
+
local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
|
|
412
|
+
'datasets',
|
|
413
|
+
'%s_%s' % (self.name, self.id))
|
|
414
|
+
return local_path
|
|
415
|
+
|
|
416
|
+
def _get_recipe(self):
|
|
417
|
+
recipes = self.recipes.list()
|
|
418
|
+
if len(recipes) > 0:
|
|
419
|
+
return recipes[0]
|
|
420
|
+
else:
|
|
421
|
+
raise exceptions.PlatformException('404', 'Dataset {} has no recipe'.format(self.name))
|
|
422
|
+
|
|
423
|
+
def _get_ontology(self):
|
|
424
|
+
if self._ontology is None:
|
|
425
|
+
ontologies = self._get_recipe().ontologies.list()
|
|
426
|
+
if len(ontologies) > 0:
|
|
427
|
+
self._ontology = ontologies[0]
|
|
428
|
+
else:
|
|
429
|
+
raise exceptions.PlatformException('404', 'Dataset {} has no ontology'.format(self.name))
|
|
430
|
+
return self._ontology
|
|
431
|
+
|
|
432
|
+
@staticmethod
|
|
433
|
+
def serialize_labels(labels_dict):
|
|
434
|
+
"""
|
|
435
|
+
Convert hex color format to rgb
|
|
436
|
+
|
|
437
|
+
:param dict labels_dict: dict of labels
|
|
438
|
+
:return: dict of converted labels
|
|
439
|
+
"""
|
|
440
|
+
dataset_labels_dict = dict()
|
|
441
|
+
for label, color in labels_dict.items():
|
|
442
|
+
dataset_labels_dict[label] = '#%02x%02x%02x' % color
|
|
443
|
+
return dataset_labels_dict
|
|
444
|
+
|
|
445
|
+
def get_recipe_ids(self):
|
|
446
|
+
"""
|
|
447
|
+
Get dataset recipe Ids
|
|
448
|
+
|
|
449
|
+
:return: list of recipe ids
|
|
450
|
+
:rtype: list
|
|
451
|
+
"""
|
|
452
|
+
return self.metadata['system']['recipes']
|
|
453
|
+
|
|
454
|
+
def switch_recipe(self, recipe_id=None, recipe=None):
|
|
455
|
+
"""
|
|
456
|
+
Switch the recipe that linked to the dataset with the given one
|
|
457
|
+
|
|
458
|
+
:param str recipe_id: recipe id
|
|
459
|
+
:param dtlpy.entities.recipe.Recipe recipe: recipe entity
|
|
460
|
+
|
|
461
|
+
**Example**:
|
|
462
|
+
|
|
463
|
+
.. code-block:: python
|
|
464
|
+
|
|
465
|
+
dataset.switch_recipe(recipe_id='recipe_id')
|
|
466
|
+
"""
|
|
467
|
+
if recipe is None and recipe_id is None:
|
|
468
|
+
raise exceptions.PlatformException('400', 'Must provide recipe or recipe_id')
|
|
469
|
+
if recipe_id is None:
|
|
470
|
+
if not isinstance(recipe, entities.Recipe):
|
|
471
|
+
raise exceptions.PlatformException('400', 'Recipe must me entities.Recipe type')
|
|
472
|
+
else:
|
|
473
|
+
recipe_id = recipe.id
|
|
474
|
+
|
|
475
|
+
# add recipe id to dataset metadata
|
|
476
|
+
if 'system' not in self.metadata:
|
|
477
|
+
self.metadata['system'] = dict()
|
|
478
|
+
if 'recipes' not in self.metadata['system']:
|
|
479
|
+
self.metadata['system']['recipes'] = list()
|
|
480
|
+
self.metadata['system']['recipes'] = [recipe_id]
|
|
481
|
+
self.update(system_metadata=True)
|
|
482
|
+
|
|
483
|
+
def delete(self, sure=False, really=False):
|
|
484
|
+
"""
|
|
485
|
+
Delete a dataset forever!
|
|
486
|
+
|
|
487
|
+
**Prerequisites**: You must be an *owner* or *developer* to use this method.
|
|
488
|
+
|
|
489
|
+
:param bool sure: are you sure you want to delete?
|
|
490
|
+
:param bool really: really really?
|
|
491
|
+
:return: True is success
|
|
492
|
+
:rtype: bool
|
|
493
|
+
|
|
494
|
+
**Example**:
|
|
495
|
+
|
|
496
|
+
.. code-block:: python
|
|
497
|
+
|
|
498
|
+
is_deleted = dataset.delete(sure=True, really=True)
|
|
499
|
+
"""
|
|
500
|
+
return self.datasets.delete(dataset_id=self.id,
|
|
501
|
+
sure=sure,
|
|
502
|
+
really=really)
|
|
503
|
+
|
|
504
|
+
def update(self, system_metadata=False):
|
|
505
|
+
"""
|
|
506
|
+
Update dataset field
|
|
507
|
+
|
|
508
|
+
**Prerequisites**: You must be an *owner* or *developer* to use this method.
|
|
509
|
+
|
|
510
|
+
:param bool system_metadata: bool - True, if you want to change metadata system
|
|
511
|
+
:return: Dataset object
|
|
512
|
+
:rtype: dtlpy.entities.dataset.Dataset
|
|
513
|
+
|
|
514
|
+
**Example**:
|
|
515
|
+
|
|
516
|
+
.. code-block:: python
|
|
517
|
+
|
|
518
|
+
dataset = dataset.update()
|
|
519
|
+
"""
|
|
520
|
+
return self.datasets.update(dataset=self,
|
|
521
|
+
system_metadata=system_metadata)
|
|
522
|
+
|
|
523
|
+
def unlock(self):
|
|
524
|
+
"""
|
|
525
|
+
Unlock dataset
|
|
526
|
+
|
|
527
|
+
**Prerequisites**: You must be an *owner* or *developer* to use this method.
|
|
528
|
+
|
|
529
|
+
:return: Dataset object
|
|
530
|
+
:rtype: dtlpy.entities.dataset.Dataset
|
|
531
|
+
|
|
532
|
+
**Example**:
|
|
533
|
+
|
|
534
|
+
.. code-block:: python
|
|
535
|
+
|
|
536
|
+
dataset = dataset.unlock()
|
|
537
|
+
"""
|
|
538
|
+
return self.datasets.unlock(dataset=self)
|
|
539
|
+
|
|
540
|
+
def set_readonly(self, state: bool):
|
|
541
|
+
"""
|
|
542
|
+
Set dataset readonly mode
|
|
543
|
+
|
|
544
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
545
|
+
|
|
546
|
+
:param bool state: state
|
|
547
|
+
|
|
548
|
+
**Example**:
|
|
549
|
+
|
|
550
|
+
.. code-block:: python
|
|
551
|
+
|
|
552
|
+
dataset.set_readonly(state=True)
|
|
553
|
+
"""
|
|
554
|
+
import warnings
|
|
555
|
+
warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
|
|
556
|
+
|
|
557
|
+
def clone(self,
|
|
558
|
+
clone_name=None,
|
|
559
|
+
filters=None,
|
|
560
|
+
with_items_annotations=True,
|
|
561
|
+
with_metadata=True,
|
|
562
|
+
with_task_annotations_status=True,
|
|
563
|
+
dst_dataset_id=None,
|
|
564
|
+
target_directory=None,
|
|
565
|
+
):
|
|
566
|
+
"""
|
|
567
|
+
Clone dataset
|
|
568
|
+
|
|
569
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
570
|
+
|
|
571
|
+
:param str clone_name: new dataset name
|
|
572
|
+
:param dtlpy.entities.filters.Filters filters: Filters entity or a query dict
|
|
573
|
+
:param bool with_items_annotations: clone all item's annotations
|
|
574
|
+
:param bool with_metadata: clone metadata
|
|
575
|
+
:param bool with_task_annotations_status: clone task annotations status
|
|
576
|
+
:param str dst_dataset_id: destination dataset id
|
|
577
|
+
:param str target_directory: target directory
|
|
578
|
+
:return: dataset object
|
|
579
|
+
:rtype: dtlpy.entities.dataset.Dataset
|
|
580
|
+
|
|
581
|
+
**Example**:
|
|
582
|
+
|
|
583
|
+
.. code-block:: python
|
|
584
|
+
|
|
585
|
+
dataset = dataset.clone(dataset_id='dataset_id',
|
|
586
|
+
clone_name='dataset_clone_name',
|
|
587
|
+
with_metadata=True,
|
|
588
|
+
with_items_annotations=False,
|
|
589
|
+
with_task_annotations_status=False)
|
|
590
|
+
"""
|
|
591
|
+
return self.datasets.clone(dataset_id=self.id,
|
|
592
|
+
filters=filters,
|
|
593
|
+
clone_name=clone_name,
|
|
594
|
+
with_metadata=with_metadata,
|
|
595
|
+
with_items_annotations=with_items_annotations,
|
|
596
|
+
with_task_annotations_status=with_task_annotations_status,
|
|
597
|
+
dst_dataset_id=dst_dataset_id,
|
|
598
|
+
target_directory=target_directory)
|
|
599
|
+
|
|
600
|
+
def sync(self, wait=True):
|
|
601
|
+
"""
|
|
602
|
+
Sync dataset with external storage
|
|
603
|
+
|
|
604
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
605
|
+
|
|
606
|
+
:param bool wait: wait for the command to finish
|
|
607
|
+
:return: True if success
|
|
608
|
+
:rtype: bool
|
|
609
|
+
|
|
610
|
+
**Example**:
|
|
611
|
+
|
|
612
|
+
.. code-block:: python
|
|
613
|
+
|
|
614
|
+
success = dataset.sync()
|
|
615
|
+
"""
|
|
616
|
+
return self.datasets.sync(dataset_id=self.id, wait=wait)
|
|
617
|
+
|
|
618
|
+
def download_annotations(self,
|
|
619
|
+
local_path=None,
|
|
620
|
+
filters=None,
|
|
621
|
+
annotation_options: ViewAnnotationOptions = None,
|
|
622
|
+
annotation_filters=None,
|
|
623
|
+
overwrite=False,
|
|
624
|
+
thickness=1,
|
|
625
|
+
with_text=False,
|
|
626
|
+
remote_path=None,
|
|
627
|
+
include_annotations_in_output=True,
|
|
628
|
+
export_png_files=False,
|
|
629
|
+
filter_output_annotations=False,
|
|
630
|
+
alpha=1,
|
|
631
|
+
export_version=ExportVersion.V1,
|
|
632
|
+
dataset_lock=False,
|
|
633
|
+
lock_timeout_sec=None,
|
|
634
|
+
export_summary=False,
|
|
635
|
+
):
|
|
636
|
+
"""
|
|
637
|
+
Download dataset by filters.
|
|
638
|
+
Filtering the dataset for items and save them local
|
|
639
|
+
Optional - also download annotation, mask, instance and image mask of the item
|
|
640
|
+
|
|
641
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
642
|
+
|
|
643
|
+
:param str local_path: local folder or filename to save to.
|
|
644
|
+
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
645
|
+
:param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
|
|
646
|
+
:param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
|
|
647
|
+
:param bool overwrite: optional - default = False
|
|
648
|
+
:param bool dataset_lock: optional - default = False
|
|
649
|
+
:param bool export_summary: optional - default = False
|
|
650
|
+
:param int lock_timeout_sec: optional
|
|
651
|
+
:param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
|
|
652
|
+
:param bool with_text: optional - add text to annotations, default = False
|
|
653
|
+
:param str remote_path: DEPRECATED and ignored
|
|
654
|
+
:param bool include_annotations_in_output: default - False , if export should contain annotations
|
|
655
|
+
:param bool export_png_files: default - if True, semantic annotations should be exported as png files
|
|
656
|
+
:param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
|
|
657
|
+
:param float alpha: opacity value [0 1], default 1
|
|
658
|
+
:param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
|
|
659
|
+
:return: local_path of the directory where all the downloaded item
|
|
660
|
+
:rtype: str
|
|
661
|
+
|
|
662
|
+
**Example**:
|
|
663
|
+
|
|
664
|
+
.. code-block:: python
|
|
665
|
+
|
|
666
|
+
local_path = dataset.download_annotations(dataset='dataset_entity',
|
|
667
|
+
local_path='local_path',
|
|
668
|
+
annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
|
|
669
|
+
overwrite=False,
|
|
670
|
+
thickness=1,
|
|
671
|
+
with_text=False,
|
|
672
|
+
alpha=1,
|
|
673
|
+
dataset_lock=False,
|
|
674
|
+
lock_timeout_sec=300,
|
|
675
|
+
export_summary=False
|
|
676
|
+
)
|
|
677
|
+
"""
|
|
678
|
+
|
|
679
|
+
return self.datasets.download_annotations(
|
|
680
|
+
dataset=self,
|
|
681
|
+
local_path=local_path,
|
|
682
|
+
overwrite=overwrite,
|
|
683
|
+
filters=filters,
|
|
684
|
+
annotation_options=annotation_options,
|
|
685
|
+
annotation_filters=annotation_filters,
|
|
686
|
+
thickness=thickness,
|
|
687
|
+
with_text=with_text,
|
|
688
|
+
remote_path=remote_path,
|
|
689
|
+
include_annotations_in_output=include_annotations_in_output,
|
|
690
|
+
export_png_files=export_png_files,
|
|
691
|
+
filter_output_annotations=filter_output_annotations,
|
|
692
|
+
alpha=alpha,
|
|
693
|
+
export_version=export_version,
|
|
694
|
+
dataset_lock=dataset_lock,
|
|
695
|
+
lock_timeout_sec=lock_timeout_sec,
|
|
696
|
+
export_summary=export_summary
|
|
697
|
+
)
|
|
698
|
+
    def export(self,
               local_path=None,
               filters=None,
               annotation_filters=None,
               feature_vector_filters=None,
               include_feature_vectors: bool = False,
               include_annotations: bool = False,
               export_type: ExportType = ExportType.JSON,
               timeout: int = 0,
               dataset_lock: bool = False,
               lock_timeout_sec: int = None,
               export_summary: bool = False,
               output_export_type: OutputExportType = None):
        """
        Export dataset items and annotations.

        **Prerequisites**: You must be an *owner* or *developer* to use this method.

        :param str local_path: The local path to save the exported dataset
        :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter the exported annotations
        :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity to filter the exported feature vectors
        :param bool include_feature_vectors: Include item feature vectors in the export
        :param bool include_annotations: Include item annotations in the export
        :param bool dataset_lock: Make the dataset read-only during the export
        :param bool export_summary: Download the dataset export summary
        :param int lock_timeout_sec: Timeout in seconds for locking the dataset during the export
        :param entities.ExportType export_type: Type of export ('json' or 'zip')
        :param entities.OutputExportType output_export_type: Output format ('json', 'zip', or 'folders'). If None, defaults to 'json'
        :param int timeout: Maximum time in seconds to wait for the export to complete
        :return: Exported item
        :rtype: dtlpy.entities.item.Item

        **Example**:

        .. code-block:: python

            export_item = dataset.export(filters=filters,
                                         include_feature_vectors=True,
                                         include_annotations=True,
                                         export_type=dl.ExportType.JSON,
                                         output_export_type=dl.OutputExportType.JSON)
        """
        return self.datasets.export(dataset=self,
                                    local_path=local_path,
                                    filters=filters,
                                    annotation_filters=annotation_filters,
                                    feature_vector_filters=feature_vector_filters,
                                    include_feature_vectors=include_feature_vectors,
                                    include_annotations=include_annotations,
                                    export_type=export_type,
                                    timeout=timeout,
                                    dataset_lock=dataset_lock,
                                    lock_timeout_sec=lock_timeout_sec,
                                    export_summary=export_summary,
                                    output_export_type=output_export_type)

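The export returns a regular platform Item, so the usual item API can pull the produced file locally. A minimal sketch; the local path and the `Item.download` call reflect standard dtlpy usage and are an assumption rather than something this diff adds:

.. code-block:: python

    import dtlpy as dl

    # assumption: `dataset` is an existing dl.Dataset entity you can access
    export_item = dataset.export(include_annotations=True,
                                 export_type=dl.ExportType.ZIP,
                                 timeout=60 * 10)  # wait up to 10 minutes

    # the export result is an item - download it like any other item
    local_file = export_item.download(local_path='/tmp/dataset_export')
    print('export saved to:', local_file)
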
    def upload_annotations(self,
                           local_path,
                           filters=None,
                           clean=False,
                           remote_root_path='/',
                           export_version=ExportVersion.V1
                           ):
        """
        Upload annotations to dataset.

        **Prerequisites**: You must have a dataset with items that match the annotation files; an item and its annotations are matched by name. You must be in the role of an *owner* or *developer*.

        :param str local_path: local folder where the annotation files are
        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filter parameters
        :param bool clean: if True, remove the existing annotations before uploading
        :param str remote_root_path: the remote root path used to match remote and local items.
            For example, if an item's filepath is a/b/item and remote_root_path is /a, the start folder will be b instead of a
        :param str export_version: `V2` - exported items will have the original extension in the filename, `V1` - no original extension in filenames

        **Example**:

        .. code-block:: python

            dataset.upload_annotations(local_path='local_path',
                                       clean=False,
                                       export_version=dl.ExportVersion.V1)
        """
        return self.datasets.upload_annotations(
            dataset=self,
            local_path=local_path,
            filters=filters,
            clean=clean,
            remote_root_path=remote_root_path,
            export_version=export_version
        )

    def checkout(self):
        """
        Checkout the dataset

        """
        self.datasets.checkout(dataset=self)

    def open_in_web(self):
        """
        Open the dataset in the web platform

        """
        self._client_api._open_in_web(url=self.platform_url)

    def add_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
                  recipe_id=None, ontology_id=None, icon_path=None):
        """
        Add a single label to the dataset.

        **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.

        :param str label_name: label name
        :param tuple color: RGB color of the annotation, e.g. (255, 0, 0) or '#ff0000' for red
        :param children: children (sub labels). List of sub labels of this label, each value is either a dict or a dl.Label
        :param list attributes: add attributes to the label
        :param str display_label: display name of the label
        :param dtlpy.entities.label.Label label: label object
        :param str recipe_id: optional recipe id
        :param str ontology_id: optional ontology id
        :param str icon_path: path to an image to be displayed with the label
        :return: label entity
        :rtype: dtlpy.entities.label.Label

        **Example**:

        .. code-block:: python

            dataset.add_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
        """
        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)
        # ontology._dataset = self

        # add label
        added_label = ontology.add_label(label_name=label_name,
                                         color=color,
                                         children=children,
                                         attributes=attributes,
                                         display_label=display_label,
                                         label=label,
                                         update_ontology=True,
                                         icon_path=icon_path)

        return added_label

    def add_labels(self, label_list, ontology_id=None, recipe_id=None):
        """
        Add labels to dataset

        **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.

        :param list label_list: a list of labels to add to the dataset's ontology. Each value should be a dict, a dl.Label, or a string
        :param str ontology_id: optional ontology id
        :param str recipe_id: optional recipe id
        :return: label entities

        **Example**:

        .. code-block:: python

            dataset.add_labels(label_list=label_list)
        """
        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)

        # add labels to ontology
        added_labels = ontology.add_labels(label_list=label_list, update_ontology=True)

        return added_labels

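Since `label_list` accepts strings, dicts, and dl.Label objects interchangeably, here is a short sketch mixing the three forms. The label names and color are made up, and the dict form is assumed to use the same `tag` key as dl.Label:

.. code-block:: python

    import dtlpy as dl

    # assumption: `dataset` is an existing dl.Dataset entity
    labels = dataset.add_labels(label_list=[
        'cat',                                     # plain string
        dl.Label(tag='dog', color=(0, 128, 255)),  # Label entity
        {'tag': 'horse'},                          # dict form (assumed schema)
    ])
    print([label.tag for label in labels])
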
    def update_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
                     recipe_id=None, ontology_id=None, upsert=False, icon_path=None):
        """
        Update a single label in the dataset.

        **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.

        :param str label_name: label name
        :param tuple color: color
        :param children: children (sub labels)
        :param list attributes: add attributes to the label
        :param str display_label: display name of the label
        :param dtlpy.entities.label.Label label: label
        :param str recipe_id: optional recipe id
        :param str ontology_id: optional ontology id
        :param bool upsert: if True, the label will be added if it does not exist
        :param str icon_path: path to an image to be displayed with the label

        :return: label entity
        :rtype: dtlpy.entities.label.Label

        **Example**:

        .. code-block:: python

            dataset.update_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
        """
        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)

        # update label
        added_label = ontology.update_label(label_name=label_name,
                                            color=color,
                                            children=children,
                                            attributes=attributes,
                                            display_label=display_label,
                                            label=label,
                                            update_ontology=True,
                                            upsert=upsert,
                                            icon_path=icon_path)

        return added_label

    def update_labels(self, label_list, ontology_id=None, recipe_id=None, upsert=False):
        """
        Update labels in the dataset.

        **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.

        :param list label_list: label list
        :param str ontology_id: optional ontology id
        :param str recipe_id: optional recipe id
        :param bool upsert: if True, labels that do not exist will be added

        :return: label entities
        :rtype: dtlpy.entities.label.Label

        **Example**:

        .. code-block:: python

            dataset.update_labels(label_list=label_list)
        """
        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)

        # update labels in ontology
        added_labels = ontology.update_labels(label_list=label_list, update_ontology=True, upsert=upsert)

        return added_labels

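A short sketch of the upsert behaviour: with `upsert=True`, a label that is not yet in the ontology is added rather than only updated. The label names and colors are illustrative:

.. code-block:: python

    import dtlpy as dl

    # assumption: `dataset` is an existing dl.Dataset entity
    updated = dataset.update_labels(
        label_list=[dl.Label(tag='person', color=(255, 0, 0)),
                    dl.Label(tag='forklift', color=(0, 255, 0))],  # may not exist yet
        upsert=True)  # create any missing label instead of only updating
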
    def download(
            self,
            filters=None,
            local_path=None,
            file_types=None,
            annotation_options: ViewAnnotationOptions = None,
            annotation_filters=None,
            overwrite=False,
            to_items_folder=True,
            thickness=1,
            with_text=False,
            without_relative_path=None,
            alpha=1,
            export_version=ExportVersion.V1,
            dataset_lock=False,
            lock_timeout_sec=None,
            export_summary=False,
            raise_on_error=False
    ):
        """
        Download dataset items by filters.
        Filters the dataset for items and saves them locally.
        Optionally also downloads the items' annotations, mask, instance mask, and image mask.

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filter parameters
        :param str local_path: local folder or filename to save to
        :param list file_types: a list of file types to download, e.g. ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
        :param list annotation_options: type of annotations to download: list(dl.ViewAnnotationOptions)
        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter the annotations for download
        :param bool overwrite: optional - overwrite existing files, default = False
        :param bool dataset_lock: optional - make the dataset read-only during the download, default = False
        :param bool export_summary: optional - download the export summary, default = False
        :param int lock_timeout_sec: optional - lock timeout in seconds for the export
        :param bool to_items_folder: create an 'items' folder and download items into it
        :param int thickness: optional - line thickness, if -1 the annotation will be filled, default = 1
        :param bool with_text: optional - add label text to annotations, default = False
        :param bool without_relative_path: download items without their relative path from the platform
        :param float alpha: opacity value [0 1], default 1
        :param str export_version: `V2` - exported items will have the original extension in the filename, `V1` - no original extension in filenames
        :param bool raise_on_error: raise an exception if an error occurs
        :return: `List` of local_path per each downloaded item

        **Example**:

        .. code-block:: python

            dataset.download(local_path='local_path',
                             annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
                             overwrite=False,
                             thickness=1,
                             with_text=False,
                             alpha=1,
                             dataset_lock=False,
                             lock_timeout_sec=300,
                             export_summary=False)
        """
        return self.items.download(filters=filters,
                                   local_path=local_path,
                                   file_types=file_types,
                                   annotation_options=annotation_options,
                                   annotation_filters=annotation_filters,
                                   overwrite=overwrite,
                                   to_items_folder=to_items_folder,
                                   thickness=thickness,
                                   with_text=with_text,
                                   without_relative_path=without_relative_path,
                                   alpha=alpha,
                                   export_version=export_version,
                                   dataset_lock=dataset_lock,
                                   lock_timeout_sec=lock_timeout_sec,
                                   export_summary=export_summary,
                                   raise_on_error=raise_on_error
                                   )

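A hedged sketch of combining `filters` and `file_types` to pull only part of the dataset; the directory name, MIME types, and local path are illustrative:

.. code-block:: python

    import dtlpy as dl

    # assumption: `dataset` is an existing dl.Dataset entity
    filters = dl.Filters(field='dir', values='/train')  # only items under /train
    paths = dataset.download(filters=filters,
                             file_types=['image/jpeg', 'image/png'],
                             local_path='/tmp/train_images',
                             annotation_options=[dl.ViewAnnotationOptions.JSON])
    print(f'downloaded {len(list(paths))} items')
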
    def download_folder(
            self,
            folder_path,
            filters=None,
            local_path=None,
            file_types=None,
            annotation_options: ViewAnnotationOptions = None,
            annotation_filters=None,
            overwrite=False,
            to_items_folder=True,
            thickness=1,
            with_text=False,
            without_relative_path=None,
            alpha=1,
            export_version=ExportVersion.V1,
            dataset_lock=False,
            lock_timeout_sec=None,
            export_summary=False,
            raise_on_error=False
    ):
        """
        Download a dataset folder.
        Optionally also downloads the items' annotations, mask, instance mask, and image mask.

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        :param str folder_path: the path of the folder to download
        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filter parameters
        :param str local_path: local folder or filename to save to
        :param list file_types: a list of file types to download, e.g. ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
        :param list annotation_options: type of annotations to download: list(dl.ViewAnnotationOptions)
        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter the annotations for download
        :param bool overwrite: optional - overwrite existing files, default = False
        :param bool dataset_lock: optional - make the dataset read-only during the download, default = False
        :param bool export_summary: optional - download the export summary, default = False
        :param int lock_timeout_sec: optional - lock timeout in seconds for the export
        :param bool to_items_folder: create an 'items' folder and download items into it
        :param int thickness: optional - line thickness, if -1 the annotation will be filled, default = 1
        :param bool with_text: optional - add label text to annotations, default = False
        :param bool without_relative_path: download items without their relative path from the platform
        :param float alpha: opacity value [0 1], default 1
        :param str export_version: `V2` - exported items will have the original extension in the filename, `V1` - no original extension in filenames
        :param bool raise_on_error: raise an exception if an error occurs
        :return: `List` of local_path per each downloaded item

        **Example**:

        .. code-block:: python

            dataset.download_folder(folder_path='folder_path',
                                    local_path='local_path',
                                    annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
                                    overwrite=False,
                                    thickness=1,
                                    with_text=False,
                                    alpha=1,
                                    dataset_lock=False,
                                    lock_timeout_sec=300,
                                    export_summary=False)
        """
        filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
        return self.items.download(filters=filters,
                                   local_path=local_path,
                                   file_types=file_types,
                                   annotation_options=annotation_options,
                                   annotation_filters=annotation_filters,
                                   overwrite=overwrite,
                                   to_items_folder=to_items_folder,
                                   thickness=thickness,
                                   with_text=with_text,
                                   without_relative_path=without_relative_path,
                                   alpha=alpha,
                                   export_version=export_version,
                                   dataset_lock=dataset_lock,
                                   lock_timeout_sec=lock_timeout_sec,
                                   export_summary=export_summary,
                                   raise_on_error=raise_on_error
                                   )

    def delete_labels(self, label_names):
        """
        Delete labels from dataset's ontologies

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        :param label_names: label object / label name / list of label objects / list of label names

        **Example**:

        .. code-block:: python

            dataset.delete_labels(label_names=['myLabel1', 'Mylabel2'])
        """
        for recipe in self.recipes.list():
            for ontology in recipe.ontologies.list():
                ontology.delete_labels(label_names=label_names)
        self._labels = None

    def update_attributes(self,
                          title: str,
                          key: str,
                          attribute_type,
                          recipe_id: str = None,
                          ontology_id: str = None,
                          scope: list = None,
                          optional: bool = None,
                          values: list = None,
                          attribute_range=None):
        """
        Add a new attribute or update it if it already exists

        :param str recipe_id: optional recipe id
        :param str ontology_id: optional ontology id
        :param str title: attribute title
        :param str key: the key of the attribute, must be unique
        :param AttributesTypes attribute_type: dl.AttributesTypes - the attribute type
        :param list scope: list of labels, or * for all labels
        :param bool optional: whether the attribute is optional
        :param list values: list of the attribute values (for checkbox and radio button)
        :param dict or AttributesRange attribute_range: dl.AttributesRange object
        :return: True if successful
        :rtype: bool

        **Example**:

        .. code-block:: python

            dataset.update_attributes(ontology_id='ontology_id',
                                      key='1',
                                      title='checkbox',
                                      attribute_type=dl.AttributesTypes.CHECKBOX,
                                      values=[1, 2, 3])
        """
        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)

        # add or update the attribute on the ontology
        attribute = ontology.update_attributes(
            title=title,
            key=key,
            attribute_type=attribute_type,
            scope=scope,
            optional=optional,
            values=values,
            attribute_range=attribute_range)

        return attribute

    def delete_attributes(self, keys: list,
                          recipe_id: str = None,
                          ontology_id: str = None):
        """
        Delete a bulk of attributes

        :param str recipe_id: recipe id
        :param str ontology_id: ontology id
        :param list keys: keys of the attributes to delete
        :return: True if successful
        :rtype: bool
        """

        # get recipe
        if recipe_id is None:
            recipe_id = self.get_recipe_ids()[0]
        recipe = self.recipes.get(recipe_id=recipe_id)

        # get ontology
        if ontology_id is None:
            ontology_id = recipe.ontology_ids[0]
        ontology = recipe.ontologies.get(ontology_id=ontology_id)
        return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)

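A minimal sketch of removing attributes by key. The key reuses the illustrative `'1'` from the `update_attributes` example above, and the default recipe and ontology are used when no ids are passed:

.. code-block:: python

    # assumption: `dataset` is an existing dl.Dataset entity
    # remove the attribute with key='1' from the dataset's default ontology
    success = dataset.delete_attributes(keys=['1'])
    print('deleted:', success)
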
    def split_ml_subsets(self,
                         items_query=None,
                         percentages: dict = None):
        """
        Split dataset items into ML subsets.

        :param dl.Filters items_query: Filters object to select items.
        :param dict percentages: {'train': x, 'validation': y, 'test': z}.
        :return: True if the split operation was successful.
        :rtype: bool
        """
        return self.datasets.split_ml_subsets(dataset_id=self.id,
                                              items_query=items_query,
                                              ml_split_list=percentages)

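A minimal usage sketch. The 80/10/10 ratios are an assumption about the expected shape of the `percentages` dict (percentage values covering the whole selection), not something this diff defines:

.. code-block:: python

    # assumption: `dataset` is an existing dl.Dataset entity
    # split every item in the dataset 80/10/10 between the ML subsets
    dataset.split_ml_subsets(percentages={'train': 80, 'validation': 10, 'test': 10})
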
    def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
        """
        Assign a specific ML subset (train/validation/test) to items defined by the given filters.
        This will set the chosen subset to True and the others to None.

        :param dl.Filters items_query: Filters to select items
        :param str subset: 'train', 'validation', or 'test'
        :return: True if successful
        :rtype: bool
        """

        return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
                                                   items_query=items_query,
                                                   subset=subset)

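A short sketch of assigning every item in one directory to the training subset; the directory name is illustrative, and the `dl.Filters(field='dir', ...)` pattern is the same one used in the download example above:

.. code-block:: python

    import dtlpy as dl

    # assumption: `dataset` is an existing dl.Dataset entity
    train_query = dl.Filters(field='dir', values='/train')
    dataset.assign_subset_to_items(subset='train', items_query=train_query)
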
    def remove_subset_from_items(self, items_query=None) -> bool:
        """
        Remove any ML subset assignment from items defined by the given filters.
        This sets the train, validation, and test tags to None.

        :param dl.Filters items_query: Filters to select items
        :return: True if successful
        :rtype: bool
        """
        return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
                                                   items_query=items_query,
                                                   subset=None,
                                                   deleteTag=True)

    def get_items_missing_ml_subset(self, filters=None) -> list:
        """
        Get the list of item IDs that are missing an ML subset assignment.
        An item is considered missing an ML subset if none of the train, validation, and test tags is True (all are None).

        :param dl.Filters filters: optional filters to narrow down the items. If None, a default filter for files is used.
        :return: list of item IDs
        :rtype: list
        """
        if filters is None:
            filters = entities.Filters()
        filters.add(field='metadata.system.tags.train', values=None)
        filters.add(field='metadata.system.tags.validation', values=None)
        filters.add(field='metadata.system.tags.test', values=None)
        missing_ids = []
        pages = self.items.list(filters=filters)
        for page in pages:
            for item in page:
                # items that pass the filters have no subset assigned
                missing_ids.append(item.id)
        return missing_ids