dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.
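
Since a wheel is a plain zip archive, a comparison like this can be reproduced locally by downloading both wheels and diffing any member file. A minimal sketch (the local paths below and the pip commands in the comments are illustrative, not part of this report):

import difflib
import zipfile

# Fetch the two wheels first, e.g.:
#   pip download dtlpy==1.115.44 --no-deps -d old/
#   pip download dtlpy==1.117.6 --no-deps -d new/
OLD = "old/dtlpy-1.115.44-py3-none-any.whl"  # hypothetical local path
NEW = "new/dtlpy-1.117.6-py3-none-any.whl"  # hypothetical local path

def read_member(wheel_path, member):
    # A .whl file is a zip archive, so any member can be read directly
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member).decode("utf-8", errors="replace").splitlines()

old_lines = read_member(OLD, "dtlpy/entities/dataset.py")
new_lines = read_member(NEW, "dtlpy/entities/dataset.py")
for line in difflib.unified_diff(old_lines, new_lines,
                                 fromfile="1.115.44/dataset.py",
                                 tofile="1.117.6/dataset.py",
                                 lineterm=""):
    print(line)
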
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +152 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +975 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +974 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1287 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1585 -1504
  167. dtlpy/repositories/downloader.py +1157 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +256 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +429 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1786 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.117.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/dataset.py CHANGED
@@ -1,1299 +1,1299 @@
- from collections import namedtuple
- import traceback
- import logging
- from enum import Enum
-
- import attr
- import os
-
- from .. import repositories, entities, services, exceptions
- from ..services.api_client import ApiClient
- from .annotation import ViewAnnotationOptions, AnnotationType, ExportVersion
-
- logger = logging.getLogger(name='dtlpy')
-
-
- class IndexDriver(str, Enum):
-     V1 = "v1"
-     V2 = "v2"
-
-
- class ExportType(str, Enum):
-     JSON = "json"
-     ZIP = "zip"
-
- class OutputExportType(str, Enum):
-     JSON = "json"
-     ZIP = "zip"
-     FOLDERS = "folders"
-
- class ExpirationOptions:
-     """
-     ExpirationOptions object
-     """
-
-     def __init__(self, item_max_days: int = None):
-         """
-         :param item_max_days: int. items in the dataset will be auto-deleted after this number of days
-         """
-         self.item_max_days = item_max_days
-
-     def to_json(self):
-         _json = dict()
-         if self.item_max_days is not None:
-             _json["itemMaxDays"] = self.item_max_days
-         return _json
-
-     @classmethod
-     def from_json(cls, _json: dict):
-         item_max_days = _json.get('itemMaxDays', None)
-         if item_max_days:
-             return cls(item_max_days=item_max_days)
-         return None
-
-
- @attr.s
- class Dataset(entities.BaseEntity):
-     """
-     Dataset object
-     """
-     # dataset information
-     id = attr.ib()
-     url = attr.ib()
-     name = attr.ib()
-     annotated = attr.ib(repr=False)
-     creator = attr.ib()
-     projects = attr.ib(repr=False)
-     items_count = attr.ib()
-     metadata = attr.ib(repr=False)
-     directoryTree = attr.ib(repr=False)
-     expiration_options = attr.ib()
-     index_driver = attr.ib()
-     enable_sync_with_cloned = attr.ib(repr=False)
-
-     # name change when to_json
-     created_at = attr.ib()
-     updated_at = attr.ib()
-     updated_by = attr.ib()
-     items_url = attr.ib(repr=False)
-     readable_type = attr.ib(repr=False)
-     access_level = attr.ib(repr=False)
-     driver = attr.ib(repr=False)
-     src_dataset = attr.ib(repr=False)
-     _readonly = attr.ib(repr=False)
-     annotations_count = attr.ib()
-
-     # api
-     _client_api = attr.ib(type=ApiClient, repr=False)
-
-     # entities
-     _project = attr.ib(default=None, repr=False)
-
-     # repositories
-     _datasets = attr.ib(repr=False, default=None)
-     _repositories = attr.ib(repr=False)
-
-     # defaults
-     _ontology_ids = attr.ib(default=None, repr=False)
-     _labels = attr.ib(default=None, repr=False)
-     _directory_tree = attr.ib(default=None, repr=False)
-     _recipe = attr.ib(default=None, repr=False)
-     _ontology = attr.ib(default=None, repr=False)
-
-     @property
-     def itemsCount(self):
-         return self.items_count
-
-     @staticmethod
-     def _protected_from_json(project: entities.Project,
-                              _json: dict,
-                              client_api: ApiClient,
-                              datasets=None,
-                              is_fetched=True):
-         """
-         Same as from_json but wrapped in try-except to catch errors
-
-         :param project: dataset's project
-         :param _json: _json response from host
-         :param client_api: ApiClient entity
-         :param datasets: Datasets repository
-         :param is_fetched: is Entity fetched from Platform
-         :return: Dataset object
-         """
-         try:
-             dataset = Dataset.from_json(project=project,
-                                         _json=_json,
-                                         client_api=client_api,
-                                         datasets=datasets,
-                                         is_fetched=is_fetched)
-             status = True
-         except Exception:
-             dataset = traceback.format_exc()
-             status = False
-         return status, dataset
-
-     @classmethod
-     def from_json(cls,
-                   project: entities.Project,
-                   _json: dict,
-                   client_api: ApiClient,
-                   datasets=None,
-                   is_fetched=True):
-         """
-         Build a Dataset entity object from a json
-
-         :param project: dataset's project
-         :param dict _json: _json response from host
-         :param client_api: ApiClient entity
-         :param datasets: Datasets repository
-         :param bool is_fetched: is Entity fetched from Platform
-         :return: Dataset object
-         :rtype: dtlpy.entities.dataset.Dataset
-         """
-         projects = _json.get('projects', None)
-         if project is not None and projects is not None:
-             if project.id not in projects:
-                 logger.warning('Dataset has been fetched from a project that is not in it projects list')
-                 project = None
-
-         expiration_options = _json.get('expirationOptions', None)
-         if expiration_options:
-             expiration_options = ExpirationOptions.from_json(expiration_options)
-         inst = cls(metadata=_json.get('metadata', None),
-                    directoryTree=_json.get('directoryTree', None),
-                    readable_type=_json.get('readableType', None),
-                    access_level=_json.get('accessLevel', None),
-                    created_at=_json.get('createdAt', None),
-                    updated_at=_json.get('updatedAt', None),
-                    updated_by=_json.get('updatedBy', None),
-                    annotations_count=_json.get("annotationsCount", None),
-                    items_count=_json.get('itemsCount', None),
-                    annotated=_json.get('annotated', None),
-                    readonly=_json.get('readonly', None),
-                    projects=projects,
-                    creator=_json.get('creator', None),
-                    items_url=_json.get('items', None),
-                    driver=_json.get('driver', None),
-                    name=_json.get('name', None),
-                    url=_json.get('url', None),
-                    id=_json.get('id', None),
-                    datasets=datasets,
-                    client_api=client_api,
-                    project=project,
-                    expiration_options=expiration_options,
-                    index_driver=_json.get('indexDriver', None),
-                    enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
-                    src_dataset=_json.get('srcDataset', None))
-         inst.is_fetched = is_fetched
-         return inst
-
-     def to_json(self):
-         """
-         Returns platform _json format of object
-
-         :return: platform json format of object
-         :rtype: dict
-         """
-         _json = attr.asdict(self, filter=attr.filters.exclude(attr.fields(Dataset)._client_api,
-                                                               attr.fields(Dataset)._project,
-                                                               attr.fields(Dataset)._readonly,
-                                                               attr.fields(Dataset)._datasets,
-                                                               attr.fields(Dataset)._repositories,
-                                                               attr.fields(Dataset)._ontology_ids,
-                                                               attr.fields(Dataset)._labels,
-                                                               attr.fields(Dataset)._recipe,
-                                                               attr.fields(Dataset)._ontology,
-                                                               attr.fields(Dataset)._directory_tree,
-                                                               attr.fields(Dataset).access_level,
-                                                               attr.fields(Dataset).readable_type,
-                                                               attr.fields(Dataset).created_at,
-                                                               attr.fields(Dataset).updated_at,
-                                                               attr.fields(Dataset).updated_by,
-                                                               attr.fields(Dataset).annotations_count,
-                                                               attr.fields(Dataset).items_url,
-                                                               attr.fields(Dataset).expiration_options,
-                                                               attr.fields(Dataset).items_count,
-                                                               attr.fields(Dataset).index_driver,
-                                                               attr.fields(Dataset).enable_sync_with_cloned,
-                                                               attr.fields(Dataset).src_dataset,
-                                                               ))
-         _json.update({'items': self.items_url})
-         _json['readableType'] = self.readable_type
-         _json['createdAt'] = self.created_at
-         _json['updatedAt'] = self.updated_at
-         _json['updatedBy'] = self.updated_by
-         _json['annotationsCount'] = self.annotations_count
-         _json['accessLevel'] = self.access_level
-         _json['readonly'] = self._readonly
-         _json['itemsCount'] = self.items_count
-         _json['indexDriver'] = self.index_driver
-         if self.expiration_options and self.expiration_options.to_json():
-             _json['expirationOptions'] = self.expiration_options.to_json()
-         if self.enable_sync_with_cloned is not None:
-             _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
-         if self.src_dataset is not None:
-             _json['srcDataset'] = self.src_dataset
-         return _json
-
-     @property
-     def labels(self):
-         if self._labels is None:
-             self._labels = self._get_ontology().labels
-         return self._labels
-
-     @property
-     def readonly(self):
-         return self._readonly
-
-     @property
-     def platform_url(self):
-         return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
-
-     @readonly.setter
-     def readonly(self, state):
-         import warnings
-         warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
-
-     @property
-     def labels_flat_dict(self):
-         return self._get_ontology().labels_flat_dict
-
-     @property
-     def instance_map(self) -> dict:
-         return self._get_ontology().instance_map
-
-     @instance_map.setter
-     def instance_map(self, value: dict):
-         """
-         instance mapping for creating instance mask
-
-         :param value: dictionary {label: map_id}
-         """
-         if not isinstance(value, dict):
-             raise ValueError('input must be a dictionary of {label_name: instance_id}')
-         self._get_ontology().instance_map = value
-
-     @property
-     def ontology_ids(self):
-         if self._ontology_ids is None:
-             self._ontology_ids = list()
-             if self.metadata is not None and 'system' in self.metadata and 'recipes' in self.metadata['system']:
-                 recipe_ids = self.get_recipe_ids()
-                 for rec_id in recipe_ids:
-                     recipe = self.recipes.get(recipe_id=rec_id)
-                     self._ontology_ids += recipe.ontology_ids
-         return self._ontology_ids
-
-     @_repositories.default
-     def set_repositories(self):
-         reps = namedtuple('repositories',
-                           field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
-                                        'ontologies', 'features', 'settings', 'schema', 'collections'])
-         if self._project is None:
-             datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
-         else:
-             datasets = self._project.datasets
-
-         return reps(
-             items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
-             recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
-             assignments=repositories.Assignments(project=self._project, client_api=self._client_api, dataset=self),
-             tasks=repositories.Tasks(client_api=self._client_api, project=self._project, dataset=self),
-             annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
-             datasets=datasets,
-             ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
-             features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
-             settings=repositories.Settings(client_api=self._client_api, dataset=self),
-             schema=repositories.Schema(client_api=self._client_api, dataset=self),
-             collections=repositories.Collections(client_api=self._client_api, dataset=self)
-         )
-
-     @property
-     def settings(self):
-         assert isinstance(self._repositories.settings, repositories.Settings)
-         return self._repositories.settings
-
-     @property
-     def items(self):
-         assert isinstance(self._repositories.items, repositories.Items)
-         return self._repositories.items
-
-     @property
-     def ontologies(self):
-         assert isinstance(self._repositories.ontologies, repositories.Ontologies)
-         return self._repositories.ontologies
-
-     @property
-     def recipes(self):
-         assert isinstance(self._repositories.recipes, repositories.Recipes)
-         return self._repositories.recipes
-
-     @property
-     def datasets(self):
-         assert isinstance(self._repositories.datasets, repositories.Datasets)
-         return self._repositories.datasets
-
-     @property
-     def assignments(self):
-         assert isinstance(self._repositories.assignments, repositories.Assignments)
-         return self._repositories.assignments
-
-     @property
-     def tasks(self):
-         assert isinstance(self._repositories.tasks, repositories.Tasks)
-         return self._repositories.tasks
-
-     @property
-     def annotations(self):
-         assert isinstance(self._repositories.annotations, repositories.Annotations)
-         return self._repositories.annotations
-
-     @property
-     def features(self):
-         assert isinstance(self._repositories.features, repositories.Features)
-         return self._repositories.features
-
-     @property
-     def collections(self):
-         assert isinstance(self._repositories.collections, repositories.Collections)
-         return self._repositories.collections
-
-     @property
-     def schema(self):
-         assert isinstance(self._repositories.schema, repositories.Schema)
-         return self._repositories.schema
-
-     @property
-     def project(self):
-         if self._project is None:
-             # get from cache
-             project = self._client_api.state_io.get('project')
-             if project is not None:
-                 # build entity from json
-                 p = entities.Project.from_json(_json=project, client_api=self._client_api)
-                 # check if dataset belongs to project
-                 if p.id in self.projects:
-                     self._project = p
-         if self._project is None:
-             self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.projects[0],
-                                                                                    fetch=None)
-         assert isinstance(self._project, entities.Project)
-         return self._project
-
-     @project.setter
-     def project(self, project):
-         if not isinstance(project, entities.Project):
-             raise ValueError('Must input a valid Project entity')
-         self._project = project
-
-     @property
-     def directory_tree(self):
-         if self._directory_tree is None:
-             self._directory_tree = self.project.datasets.directory_tree(dataset_id=self.id)
-         assert isinstance(self._directory_tree, entities.DirectoryTree)
-         return self._directory_tree
-
-     def __copy__(self):
-         return Dataset.from_json(_json=self.to_json(),
-                                  project=self._project,
-                                  client_api=self._client_api,
-                                  is_fetched=self.is_fetched,
-                                  datasets=self.datasets)
-
-     def __get_local_path__(self):
-         if self._project is not None:
-             local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
-                                       'projects',
-                                       self.project.name,
-                                       'datasets',
-                                       self.name)
-         else:
-             local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
-                                       'datasets',
-                                       '%s_%s' % (self.name, self.id))
-         return local_path
-
-     def _get_recipe(self):
-         recipes = self.recipes.list()
-         if len(recipes) > 0:
-             return recipes[0]
-         else:
-             raise exceptions.PlatformException('404', 'Dataset {} has no recipe'.format(self.name))
-
-     def _get_ontology(self):
-         if self._ontology is None:
-             ontologies = self._get_recipe().ontologies.list()
-             if len(ontologies) > 0:
-                 self._ontology = ontologies[0]
-             else:
-                 raise exceptions.PlatformException('404', 'Dataset {} has no ontology'.format(self.name))
-         return self._ontology
-
-     @staticmethod
-     def serialize_labels(labels_dict):
-         """
-         Convert RGB tuple color format to hex
-
-         :param dict labels_dict: dict of labels
-         :return: dict of converted labels
-         """
-         dataset_labels_dict = dict()
-         for label, color in labels_dict.items():
-             dataset_labels_dict[label] = '#%02x%02x%02x' % color
-         return dataset_labels_dict
-
-     def get_recipe_ids(self):
-         """
-         Get dataset recipe Ids
-
-         :return: list of recipe ids
-         :rtype: list
-         """
-         return self.metadata['system']['recipes']
-
-     def switch_recipe(self, recipe_id=None, recipe=None):
-         """
-         Switch the recipe linked to the dataset with the given one
-
-         :param str recipe_id: recipe id
-         :param dtlpy.entities.recipe.Recipe recipe: recipe entity
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.switch_recipe(recipe_id='recipe_id')
-         """
-         if recipe is None and recipe_id is None:
-             raise exceptions.PlatformException('400', 'Must provide recipe or recipe_id')
-         if recipe_id is None:
-             if not isinstance(recipe, entities.Recipe):
-                 raise exceptions.PlatformException('400', 'Recipe must me entities.Recipe type')
-             else:
-                 recipe_id = recipe.id
-
-         # add recipe id to dataset metadata
-         if 'system' not in self.metadata:
-             self.metadata['system'] = dict()
-         if 'recipes' not in self.metadata['system']:
-             self.metadata['system']['recipes'] = list()
-         self.metadata['system']['recipes'] = [recipe_id]
-         self.update(system_metadata=True)
-
-     def delete(self, sure=False, really=False):
-         """
-         Delete a dataset forever!
-
-         **Prerequisites**: You must be an *owner* or *developer* to use this method.
-
-         :param bool sure: are you sure you want to delete?
-         :param bool really: really really?
-         :return: True if success
-         :rtype: bool
-
-         **Example**:
-
-         .. code-block:: python
-
-             is_deleted = dataset.delete(sure=True, really=True)
-         """
-         return self.datasets.delete(dataset_id=self.id,
-                                     sure=sure,
-                                     really=really)
-
-     def update(self, system_metadata=False):
-         """
-         Update dataset field
-
-         **Prerequisites**: You must be an *owner* or *developer* to use this method.
-
-         :param bool system_metadata: bool - True, if you want to change metadata system
-         :return: Dataset object
-         :rtype: dtlpy.entities.dataset.Dataset
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset = dataset.update()
-         """
-         return self.datasets.update(dataset=self,
-                                     system_metadata=system_metadata)
-
-     def unlock(self):
-         """
-         Unlock dataset
-
-         **Prerequisites**: You must be an *owner* or *developer* to use this method.
-
-         :return: Dataset object
-         :rtype: dtlpy.entities.dataset.Dataset
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset = dataset.unlock()
-         """
-         return self.datasets.unlock(dataset=self)
-
-     def set_readonly(self, state: bool):
-         """
-         Set dataset readonly mode
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param bool state: state
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.set_readonly(state=True)
-         """
-         import warnings
-         warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
-
-     def clone(self,
-               clone_name=None,
-               filters=None,
-               with_items_annotations=True,
-               with_metadata=True,
-               with_task_annotations_status=True,
-               dst_dataset_id=None,
-               target_directory=None,
-               ):
-         """
-         Clone dataset
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param str clone_name: new dataset name
-         :param dtlpy.entities.filters.Filters filters: Filters entity or a query dict
-         :param bool with_items_annotations: clone all item's annotations
-         :param bool with_metadata: clone metadata
-         :param bool with_task_annotations_status: clone task annotations status
-         :param str dst_dataset_id: destination dataset id
-         :param str target_directory: target directory
-         :return: dataset object
-         :rtype: dtlpy.entities.dataset.Dataset
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset = dataset.clone(clone_name='dataset_clone_name',
-                                     with_metadata=True,
-                                     with_items_annotations=False,
-                                     with_task_annotations_status=False)
-         """
-         return self.datasets.clone(dataset_id=self.id,
-                                    filters=filters,
-                                    clone_name=clone_name,
-                                    with_metadata=with_metadata,
-                                    with_items_annotations=with_items_annotations,
-                                    with_task_annotations_status=with_task_annotations_status,
-                                    dst_dataset_id=dst_dataset_id,
-                                    target_directory=target_directory)
-
-     def sync(self, wait=True):
-         """
-         Sync dataset with external storage
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param bool wait: wait for the command to finish
-         :return: True if success
-         :rtype: bool
-
-         **Example**:
-
-         .. code-block:: python
-
-             success = dataset.sync()
-         """
-         return self.datasets.sync(dataset_id=self.id, wait=wait)
-
-     def download_annotations(self,
-                              local_path=None,
-                              filters=None,
-                              annotation_options: ViewAnnotationOptions = None,
-                              annotation_filters=None,
-                              overwrite=False,
-                              thickness=1,
-                              with_text=False,
-                              remote_path=None,
-                              include_annotations_in_output=True,
-                              export_png_files=False,
-                              filter_output_annotations=False,
-                              alpha=1,
-                              export_version=ExportVersion.V1,
-                              dataset_lock=False,
-                              lock_timeout_sec=None,
-                              export_summary=False,
-                              ):
-         """
-         Download dataset annotations by filters.
-         Filter the dataset for items and save them locally.
-         Optional - also download annotation, mask, instance and image mask of the item
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param str local_path: local folder or filename to save to.
-         :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-         :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
-         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
-         :param bool overwrite: optional - default = False
-         :param bool dataset_lock: optional - default = False
-         :param bool export_summary: optional - default = False
-         :param int lock_timeout_sec: optional
-         :param int thickness: optional - line thickness, if -1 annotation will be filled, default = 1
-         :param bool with_text: optional - add text to annotations, default = False
-         :param str remote_path: DEPRECATED and ignored
-         :param bool include_annotations_in_output: default = True, if export should contain annotations
-         :param bool export_png_files: default = False, if True semantic annotations will be exported as png files
-         :param bool filter_output_annotations: default = False, given an export by filter - determine if to filter out annotations
-         :param float alpha: opacity value [0 1], default 1
-         :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
-         :return: local_path of the directory where all the items were downloaded
-         :rtype: str
-
-         **Example**:
-
-         .. code-block:: python
-
-             local_path = dataset.download_annotations(local_path='local_path',
-                                                       annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
-                                                       overwrite=False,
-                                                       thickness=1,
-                                                       with_text=False,
-                                                       alpha=1,
-                                                       dataset_lock=False,
-                                                       lock_timeout_sec=300,
-                                                       export_summary=False
-                                                       )
-         """
-
-         return self.datasets.download_annotations(
-             dataset=self,
-             local_path=local_path,
-             overwrite=overwrite,
-             filters=filters,
-             annotation_options=annotation_options,
-             annotation_filters=annotation_filters,
-             thickness=thickness,
-             with_text=with_text,
-             remote_path=remote_path,
-             include_annotations_in_output=include_annotations_in_output,
-             export_png_files=export_png_files,
-             filter_output_annotations=filter_output_annotations,
-             alpha=alpha,
-             export_version=export_version,
-             dataset_lock=dataset_lock,
-             lock_timeout_sec=lock_timeout_sec,
-             export_summary=export_summary
-         )
-
-     def export(self,
-                local_path=None,
-                filters=None,
-                annotation_filters=None,
-                feature_vector_filters=None,
-                include_feature_vectors: bool = False,
-                include_annotations: bool = False,
-                export_type: ExportType = ExportType.JSON,
-                timeout: int = 0,
-                dataset_lock: bool = False,
-                lock_timeout_sec: int = None,
-                export_summary: bool = False,
-                output_export_type: OutputExportType = None):
-         """
-         Export dataset items and annotations.
-
-         **Prerequisites**: You must be an *owner* or *developer* to use this method.
-
-         :param str local_path: The local path to save the exported dataset
-         :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
-         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
-         :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
-         :param bool include_feature_vectors: Include item feature vectors in the export
-         :param bool include_annotations: Include item annotations in the export
-         :param bool dataset_lock: Make dataset readonly during the export
-         :param bool export_summary: Download dataset export summary
-         :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
-         :param entities.ExportType export_type: Type of export ('json' or 'zip')
-         :param entities.OutputExportType output_export_type: Output format ('json', 'zip', or 'folders'). If None, defaults to 'json'
-         :param int timeout: Maximum time in seconds to wait for the export to complete
-         :return: Exported item
-         :rtype: dtlpy.entities.item.Item
-
-         **Example**:
-
-         .. code-block:: python
-
-             export_item = dataset.export(filters=filters,
-                                          include_feature_vectors=True,
-                                          include_annotations=True,
-                                          export_type=dl.ExportType.JSON,
-                                          output_export_type=dl.OutputExportType.JSON)
-         """
-
-         return self.datasets.export(dataset=self,
-                                     local_path=local_path,
-                                     filters=filters,
-                                     annotation_filters=annotation_filters,
-                                     feature_vector_filters=feature_vector_filters,
-                                     include_feature_vectors=include_feature_vectors,
-                                     include_annotations=include_annotations,
-                                     export_type=export_type,
-                                     timeout=timeout,
-                                     dataset_lock=dataset_lock,
-                                     lock_timeout_sec=lock_timeout_sec,
-                                     export_summary=export_summary,
-                                     output_export_type=output_export_type)
-
-     def upload_annotations(self,
-                            local_path,
-                            filters=None,
-                            clean=False,
-                            remote_root_path='/',
-                            export_version=ExportVersion.V1
-                            ):
-         """
-         Upload annotations to dataset.
-
-         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
-
-         :param str local_path: str - local folder where the annotation files are.
-         :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-         :param bool clean: bool - if True, remove the old annotations
-         :param str remote_root_path: str - the remote root path to match remote and local items
-         :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
-
-         For example, if the item filepath is a/b/item and remote_root_path is /a the start folder will be b instead of a
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.upload_annotations(local_path='local_path',
-                                        clean=False,
-                                        export_version=dl.ExportVersion.V1
-                                        )
-         """
-
-         return self.datasets.upload_annotations(
-             dataset=self,
-             local_path=local_path,
-             filters=filters,
-             clean=clean,
-             remote_root_path=remote_root_path,
-             export_version=export_version
-         )
-
-     def checkout(self):
-         """
-         Checkout the dataset
-
-         """
-         self.datasets.checkout(dataset=self)
-
-     def open_in_web(self):
-         """
-         Open the dataset in web platform
-
-         """
-         self._client_api._open_in_web(url=self.platform_url)
-
-     def add_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
-                   recipe_id=None, ontology_id=None, icon_path=None):
-         """
-         Add single label to dataset
-
-         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
-
-         :param str label_name: str - label name
-         :param tuple color: RGB color of the annotation, e.g (255,0,0) or '#ff0000' for red
-         :param children: children (sub labels). list of sub labels of this current label, each value is either dict or dl.Label
-         :param list attributes: add attributes to the labels
-         :param str display_label: display name of the label
-         :param dtlpy.entities.label.Label label: label object
-         :param str recipe_id: optional recipe id
-         :param str ontology_id: optional ontology id
-         :param str icon_path: path to an image to display on the label
-         :return: label entity
-         :rtype: dtlpy.entities.label.Label
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.add_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
-         """
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-         # ontology._dataset = self
-
-         # add label
-         added_label = ontology.add_label(label_name=label_name,
-                                          color=color,
-                                          children=children,
-                                          attributes=attributes,
-                                          display_label=display_label,
-                                          label=label,
-                                          update_ontology=True,
-                                          icon_path=icon_path)
-
-         return added_label
-
-     def add_labels(self, label_list, ontology_id=None, recipe_id=None):
-         """
-         Add labels to dataset
-
-         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
-
-         :param list label_list: a list of labels to add to the dataset's ontology. each value should be a dict, dl.Label or a string
-         :param str ontology_id: optional ontology id
-         :param str recipe_id: optional recipe id
-         :return: label entities
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.add_labels(label_list=label_list)
-         """
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-
-         # add labels to ontology
-         added_labels = ontology.add_labels(label_list=label_list, update_ontology=True)
-
-         return added_labels
-
-     def update_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
-                      recipe_id=None, ontology_id=None, upsert=False, icon_path=None):
-         """
-         Update a single label in the dataset
-
-         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
-
-         :param str label_name: str - label name
-         :param tuple color: color
-         :param children: children (sub labels)
-         :param list attributes: add attributes to the labels
-         :param str display_label: display name of the label
-         :param dtlpy.entities.label.Label label: label
-         :param str recipe_id: optional recipe id
-         :param str ontology_id: optional ontology id
-         :param bool upsert: if True, add the label if it does not exist
-         :param str icon_path: path to an image to display on the label
-
-         :return: label entity
-         :rtype: dtlpy.entities.label.Label
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.update_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
-         """
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-
-         # update label
-         added_label = ontology.update_label(label_name=label_name,
-                                             color=color,
-                                             children=children,
-                                             attributes=attributes,
-                                             display_label=display_label,
-                                             label=label,
-                                             update_ontology=True,
-                                             upsert=upsert,
-                                             icon_path=icon_path)
-
-         return added_label
-
-     def update_labels(self, label_list, ontology_id=None, recipe_id=None, upsert=False):
-         """
-         Update labels in the dataset
-
-         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
-
-         :param list label_list: label list
-         :param str ontology_id: optional ontology id
-         :param str recipe_id: optional recipe id
-         :param bool upsert: if True, add the labels if they do not exist
-
-         :return: label entities
-         :rtype: dtlpy.entities.label.Label
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.update_labels(label_list=label_list)
-         """
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-
-         # update labels in ontology
-         added_labels = ontology.update_labels(label_list=label_list, update_ontology=True, upsert=upsert)
-
-         return added_labels
-
-     def download(
-             self,
-             filters=None,
-             local_path=None,
-             file_types=None,
-             annotation_options: ViewAnnotationOptions = None,
-             annotation_filters=None,
-             overwrite=False,
-             to_items_folder=True,
-             thickness=1,
-             with_text=False,
-             without_relative_path=None,
-             alpha=1,
-             export_version=ExportVersion.V1,
-             dataset_lock=False,
-             lock_timeout_sec=None,
-             export_summary=False,
-             raise_on_error=False
-     ):
-         """
-         Download dataset by filters.
-         Filter the dataset for items and save them locally.
-         Optional - also download annotation, mask, instance and image mask of the item
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-         :param str local_path: local folder or filename to save to.
-         :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
-         :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
-         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
-         :param bool overwrite: optional - default = False to overwrite the existing files
-         :param bool dataset_lock: optional - default = False to make dataset readonly during the download
-         :param bool export_summary: optional - default = False to get the summary of the export
-         :param int lock_timeout_sec: optional - Set lock timeout for the export
-         :param bool to_items_folder: Create 'items' folder and download items to it
-         :param int thickness: optional - line thickness, if -1 annotation will be filled, default = 1
-         :param bool with_text: optional - add text to annotations, default = False
-         :param bool without_relative_path: bool - download items without the relative path from platform
-         :param float alpha: opacity value [0 1], default 1
-         :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
-         :param bool raise_on_error: raise an exception if an error occurs
-         :return: `List` of local_path per each downloaded item
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.download(local_path='local_path',
-                              annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
-                              overwrite=False,
-                              thickness=1,
-                              with_text=False,
-                              alpha=1,
-                              dataset_lock=False,
-                              lock_timeout_sec=300,
-                              export_summary=False
-                              )
-         """
-         return self.items.download(filters=filters,
-                                    local_path=local_path,
-                                    file_types=file_types,
-                                    annotation_options=annotation_options,
-                                    annotation_filters=annotation_filters,
-                                    overwrite=overwrite,
-                                    to_items_folder=to_items_folder,
-                                    thickness=thickness,
-                                    with_text=with_text,
-                                    without_relative_path=without_relative_path,
-                                    alpha=alpha,
-                                    export_version=export_version,
-                                    dataset_lock=dataset_lock,
-                                    lock_timeout_sec=lock_timeout_sec,
-                                    export_summary=export_summary,
-                                    raise_on_error=raise_on_error
-                                    )
-
-     def download_folder(
-             self,
-             folder_path,
-             filters=None,
-             local_path=None,
-             file_types=None,
-             annotation_options: ViewAnnotationOptions = None,
-             annotation_filters=None,
-             overwrite=False,
-             to_items_folder=True,
-             thickness=1,
-             with_text=False,
-             without_relative_path=None,
-             alpha=1,
-             export_version=ExportVersion.V1,
-             dataset_lock=False,
-             lock_timeout_sec=None,
-             export_summary=False,
-             raise_on_error=False
-     ):
-         """
-         Download dataset folder.
-         Optional - also download annotation, mask, instance and image mask of the item
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param str folder_path: the path of the folder to download
-         :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-         :param str local_path: local folder or filename to save to.
-         :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
-         :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
-         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
-         :param bool overwrite: optional - default = False to overwrite the existing files
-         :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
-         :param bool export_summary: optional - default = False to get the summary of the export
-         :param int lock_timeout_sec: optional - Set lock timeout for the export
-         :param bool to_items_folder: Create 'items' folder and download items to it
-         :param int thickness: optional - line thickness, if -1 annotation will be filled, default = 1
-         :param bool with_text: optional - add text to annotations, default = False
-         :param bool without_relative_path: bool - download items without the relative path from platform
-         :param float alpha: opacity value [0 1], default 1
-         :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
-         :param bool raise_on_error: raise an exception if an error occurs
-         :return: `List` of local_path per each downloaded item
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.download_folder(folder_path='folder_path',
-                                     local_path='local_path',
-                                     annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
-                                     overwrite=False,
-                                     thickness=1,
-                                     with_text=False,
-                                     alpha=1,
-                                     dataset_lock=False,
-                                     lock_timeout_sec=300,
-                                     export_summary=False
-                                     )
-         """
-         filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
-         return self.items.download(filters=filters,
-                                    local_path=local_path,
-                                    file_types=file_types,
-                                    annotation_options=annotation_options,
-                                    annotation_filters=annotation_filters,
-                                    overwrite=overwrite,
-                                    to_items_folder=to_items_folder,
-                                    thickness=thickness,
-                                    with_text=with_text,
-                                    without_relative_path=without_relative_path,
-                                    alpha=alpha,
-                                    export_version=export_version,
-                                    dataset_lock=dataset_lock,
-                                    lock_timeout_sec=lock_timeout_sec,
-                                    export_summary=export_summary,
-                                    raise_on_error=raise_on_error
-                                    )
-
-     def delete_labels(self, label_names):
-         """
-         Delete labels from dataset's ontologies
-
-         **Prerequisites**: You must be in the role of an *owner* or *developer*.
-
-         :param label_names: label object / label name / list of label objects / list of label names
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.delete_labels(label_names=['myLabel1', 'Mylabel2'])
-         """
-         for recipe in self.recipes.list():
-             for ontology in recipe.ontologies.list():
-                 ontology.delete_labels(label_names=label_names)
-         self._labels = None
-
-     def update_attributes(self,
-                           title: str,
-                           key: str,
-                           attribute_type,
-                           recipe_id: str = None,
-                           ontology_id: str = None,
-                           scope: list = None,
-                           optional: bool = None,
-                           values: list = None,
-                           attribute_range=None):
-         """
-         Add a new attribute or update it if it exists
-
-         :param str recipe_id: optional recipe id
-         :param str ontology_id: ontology_id
-         :param str title: attribute title
-         :param str key: the key of the attribute, must be unique
-         :param AttributesTypes attribute_type: dl.AttributesTypes your attribute type
-         :param list scope: list of the labels or * for all labels
-         :param bool optional: optional attribute
-         :param list values: list of the attribute values (for checkbox and radio button)
-         :param dict or AttributesRange attribute_range: dl.AttributesRange object
-         :return: True if success
-         :rtype: bool
-
-         **Example**:
-
-         .. code-block:: python
-
-             dataset.update_attributes(ontology_id='ontology_id',
-                                       key='1',
-                                       title='checkbox',
-                                       attribute_type=dl.AttributesTypes.CHECKBOX,
-                                       values=[1, 2, 3])
-         """
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-
-         # add attribute to ontology
-         attribute = ontology.update_attributes(
-             title=title,
-             key=key,
-             attribute_type=attribute_type,
-             scope=scope,
-             optional=optional,
-             values=values,
-             attribute_range=attribute_range)
-
-         return attribute
-
-     def delete_attributes(self, keys: list,
-                           recipe_id: str = None,
-                           ontology_id: str = None):
-         """
-         Delete a bulk of attributes
-
-         :param str recipe_id: recipe id
-         :param str ontology_id: ontology id
-         :param list keys: Keys of attributes to delete
-         :return: True if success
-         :rtype: bool
-         """
-
-         # get recipe
-         if recipe_id is None:
-             recipe_id = self.get_recipe_ids()[0]
-         recipe = self.recipes.get(recipe_id=recipe_id)
-
-         # get ontology
-         if ontology_id is None:
-             ontology_id = recipe.ontology_ids[0]
-         ontology = recipe.ontologies.get(ontology_id=ontology_id)
-         return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)
-
-     def split_ml_subsets(self,
-                          items_query=None,
-                          percentages: dict = None):
-         """
-         Split dataset items into ML subsets.
-
-         :param dl.Filters items_query: Filters object to select items.
-         :param dict percentages: {'train': x, 'validation': y, 'test': z}.
-         :return: True if the split operation was successful.
-         :rtype: bool
-         """
-         return self.datasets.split_ml_subsets(dataset_id=self.id,
-                                               items_query=items_query,
-                                               ml_split_list=percentages)
-
-     def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
-         """
-         Assign a specific ML subset (train/validation/test) to items defined by the given filters.
-         This will set the chosen subset to True and the others to None.
-
-         :param str subset: 'train', 'validation', or 'test'
-         :param dl.Filters items_query: Filters to select items
-         :return: True if successful
-         :rtype: bool
-         """
-
-         return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
-                                                    items_query=items_query,
-                                                    subset=subset)
-
-     def remove_subset_from_items(self, items_query=None) -> bool:
-         """
-         Remove any ML subset assignment from items defined by the given filters.
-         This sets the train, validation, and test tags to None.
-
-         :param dl.Filters items_query: Filters to select items
-         :return: True if successful
-         :rtype: bool
-         """
-         return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
-                                                    items_query=items_query,
-                                                    subset=None,
-                                                    deleteTag=True)
-
-     def get_items_missing_ml_subset(self, filters=None) -> list:
-         """
-         Get the list of item IDs that are missing ML subset assignment.
-         An item is considered missing an ML subset if the train, validation, and test tags are all None.
-
-         :param dl.Filters filters: optional filters to narrow down items. If None, will use a default filter for files.
-         :return: list of item IDs
-         :rtype: list
-         """
-         if filters is None:
-             filters = entities.Filters()
-             filters.add(field='metadata.system.tags.train', values=None)
-             filters.add(field='metadata.system.tags.validation', values=None)
-             filters.add(field='metadata.system.tags.test', values=None)
-         missing_ids = []
-         pages = self.items.list(filters=filters)
-         for page in pages:
-             for item in page:
-                 # items that pass the filters have no subset assigned
-                 missing_ids.append(item.id)
-         return missing_ids
1
+ from collections import namedtuple
2
+ import traceback
3
+ import logging
4
+ from enum import Enum
5
+
6
+ import attr
7
+ import os
8
+
9
+ from .. import repositories, entities, services, exceptions
10
+ from ..services.api_client import ApiClient
11
+ from .annotation import ViewAnnotationOptions, AnnotationType, ExportVersion
12
+
13
+ logger = logging.getLogger(name='dtlpy')
14
+
15
+
16
+ class IndexDriver(str, Enum):
17
+ V1 = "v1"
18
+ V2 = "v2"
19
+
20
+
21
+ class ExportType(str, Enum):
22
+ JSON = "json"
23
+ ZIP = "zip"
24
+
25
+ class OutputExportType(str, Enum):
26
+ JSON = "json"
27
+ ZIP = "zip"
28
+ FOLDERS = "folders"
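+
+ # A minimal usage sketch (illustrative; assumes an initialized dtlpy client `dl` and an
+ # existing `dataset` entity). These enums are passed to Dataset.export, e.g.:
+ # dataset.export(export_type=ExportType.JSON, output_export_type=OutputExportType.FOLDERS)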
29
+
30
+ class ExpirationOptions:
31
+ """
32
+ ExpirationOptions object
33
+ """
34
+
35
+ def __init__(self, item_max_days: int = None):
36
+ """
37
+ :param item_max_days: int. items in the dataset will be automatically deleted after this number of days
38
+ """
39
+ self.item_max_days = item_max_days
40
+
41
+ def to_json(self):
42
+ _json = dict()
43
+ if self.item_max_days is not None:
44
+ _json["itemMaxDays"] = self.item_max_days
45
+ return _json
46
+
47
+ @classmethod
48
+ def from_json(cls, _json: dict):
49
+ item_max_days = _json.get('itemMaxDays', None)
50
+ if item_max_days:
51
+ return cls(item_max_days=item_max_days)
52
+ return None
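+
+ # A minimal usage sketch (illustrative): items in the dataset would be auto-deleted after 30 days.
+ # expiration = ExpirationOptions(item_max_days=30)
+ # expiration.to_json() # -> {'itemMaxDays': 30}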
53
+
54
+
55
+ @attr.s
56
+ class Dataset(entities.BaseEntity):
57
+ """
58
+ Dataset object
59
+ """
60
+ # dataset information
61
+ id = attr.ib()
62
+ url = attr.ib()
63
+ name = attr.ib()
64
+ annotated = attr.ib(repr=False)
65
+ creator = attr.ib()
66
+ projects = attr.ib(repr=False)
67
+ items_count = attr.ib()
68
+ metadata = attr.ib(repr=False)
69
+ directoryTree = attr.ib(repr=False)
70
+ expiration_options = attr.ib()
71
+ index_driver = attr.ib()
72
+ enable_sync_with_cloned = attr.ib(repr=False)
73
+
74
+ # name change when to_json
75
+ created_at = attr.ib()
76
+ updated_at = attr.ib()
77
+ updated_by = attr.ib()
78
+ items_url = attr.ib(repr=False)
79
+ readable_type = attr.ib(repr=False)
80
+ access_level = attr.ib(repr=False)
81
+ driver = attr.ib(repr=False)
82
+ src_dataset = attr.ib(repr=False)
83
+ _readonly = attr.ib(repr=False)
84
+ annotations_count = attr.ib()
85
+
86
+ # api
87
+ _client_api = attr.ib(type=ApiClient, repr=False)
88
+
89
+ # entities
90
+ _project = attr.ib(default=None, repr=False)
91
+
92
+ # repositories
93
+ _datasets = attr.ib(repr=False, default=None)
94
+ _repositories = attr.ib(repr=False)
95
+
96
+ # defaults
97
+ _ontology_ids = attr.ib(default=None, repr=False)
98
+ _labels = attr.ib(default=None, repr=False)
99
+ _directory_tree = attr.ib(default=None, repr=False)
100
+ _recipe = attr.ib(default=None, repr=False)
101
+ _ontology = attr.ib(default=None, repr=False)
102
+
103
+ @property
104
+ def itemsCount(self):
105
+ return self.items_count
106
+
107
+ @staticmethod
108
+ def _protected_from_json(project: entities.Project,
109
+ _json: dict,
110
+ client_api: ApiClient,
111
+ datasets=None,
112
+ is_fetched=True):
113
+ """
114
+ Same as from_json but with try-except to catch if error
115
+
116
+ :param project: dataset's project
117
+ :param _json: _json response from host
118
+ :param client_api: ApiClient entity
119
+ :param datasets: Datasets repository
120
+ :param is_fetched: is Entity fetched from Platform
121
+ :return: Dataset object
122
+ """
123
+ try:
124
+ dataset = Dataset.from_json(project=project,
125
+ _json=_json,
126
+ client_api=client_api,
127
+ datasets=datasets,
128
+ is_fetched=is_fetched)
129
+ status = True
130
+ except Exception:
131
+ dataset = traceback.format_exc()
132
+ status = False
133
+ return status, dataset
134
+
135
+ @classmethod
136
+ def from_json(cls,
137
+ project: entities.Project,
138
+ _json: dict,
139
+ client_api: ApiClient,
140
+ datasets=None,
141
+ is_fetched=True):
142
+ """
143
+ Build a Dataset entity object from a json
144
+
145
+ :param project: dataset's project
146
+ :param dict _json: _json response from host
147
+ :param client_api: ApiClient entity
148
+ :param datasets: Datasets repository
149
+ :param bool is_fetched: is Entity fetched from Platform
150
+ :return: Dataset object
151
+ :rtype: dtlpy.entities.dataset.Dataset
152
+ """
153
+ projects = _json.get('projects', None)
154
+ if project is not None and projects is not None:
155
+ if project.id not in projects:
156
+ logger.warning('Dataset has been fetched from a project that is not in its projects list')
157
+ project = None
158
+
159
+ expiration_options = _json.get('expirationOptions', None)
160
+ if expiration_options:
161
+ expiration_options = ExpirationOptions.from_json(expiration_options)
162
+ inst = cls(metadata=_json.get('metadata', None),
163
+ directoryTree=_json.get('directoryTree', None),
164
+ readable_type=_json.get('readableType', None),
165
+ access_level=_json.get('accessLevel', None),
166
+ created_at=_json.get('createdAt', None),
167
+ updated_at=_json.get('updatedAt', None),
168
+ updated_by=_json.get('updatedBy', None),
169
+ annotations_count=_json.get("annotationsCount", None),
170
+ items_count=_json.get('itemsCount', None),
171
+ annotated=_json.get('annotated', None),
172
+ readonly=_json.get('readonly', None),
173
+ projects=projects,
174
+ creator=_json.get('creator', None),
175
+ items_url=_json.get('items', None),
176
+ driver=_json.get('driver', None),
177
+ name=_json.get('name', None),
178
+ url=_json.get('url', None),
179
+ id=_json.get('id', None),
180
+ datasets=datasets,
181
+ client_api=client_api,
182
+ project=project,
183
+ expiration_options=expiration_options,
184
+ index_driver=_json.get('indexDriver', None),
185
+ enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
186
+ src_dataset=_json.get('srcDataset', None))
187
+ inst.is_fetched = is_fetched
188
+ return inst
189
+
190
+ def to_json(self):
191
+ """
192
+ Returns platform _json format of object
193
+
194
+ :return: platform json format of object
195
+ :rtype: dict
196
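+
+ **Example** (a minimal sketch):
+
+ .. code-block:: python
+
+ dataset_json = dataset.to_json()
+ assert dataset_json['id'] == dataset.id
+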
+ """
197
+ _json = attr.asdict(self, filter=attr.filters.exclude(attr.fields(Dataset)._client_api,
198
+ attr.fields(Dataset)._project,
199
+ attr.fields(Dataset)._readonly,
200
+ attr.fields(Dataset)._datasets,
201
+ attr.fields(Dataset)._repositories,
202
+ attr.fields(Dataset)._ontology_ids,
203
+ attr.fields(Dataset)._labels,
204
+ attr.fields(Dataset)._recipe,
205
+ attr.fields(Dataset)._ontology,
206
+ attr.fields(Dataset)._directory_tree,
207
+ attr.fields(Dataset).access_level,
208
+ attr.fields(Dataset).readable_type,
209
+ attr.fields(Dataset).created_at,
210
+ attr.fields(Dataset).updated_at,
211
+ attr.fields(Dataset).updated_by,
212
+ attr.fields(Dataset).annotations_count,
213
+ attr.fields(Dataset).items_url,
214
+ attr.fields(Dataset).expiration_options,
215
+ attr.fields(Dataset).items_count,
216
+ attr.fields(Dataset).index_driver,
217
+ attr.fields(Dataset).enable_sync_with_cloned,
218
+ attr.fields(Dataset).src_dataset,
219
+ ))
220
+ _json.update({'items': self.items_url})
221
+ _json['readableType'] = self.readable_type
222
+ _json['createdAt'] = self.created_at
223
+ _json['updatedAt'] = self.updated_at
224
+ _json['updatedBy'] = self.updated_by
225
+ _json['annotationsCount'] = self.annotations_count
226
+ _json['accessLevel'] = self.access_level
227
+ _json['readonly'] = self._readonly
228
+ _json['itemsCount'] = self.items_count
229
+ _json['indexDriver'] = self.index_driver
230
+ if self.expiration_options and self.expiration_options.to_json():
231
+ _json['expirationOptions'] = self.expiration_options.to_json()
232
+ if self.enable_sync_with_cloned is not None:
233
+ _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
234
+ if self.src_dataset is not None:
235
+ _json['srcDataset'] = self.src_dataset
236
+ return _json
237
+
238
+ @property
239
+ def labels(self):
240
+ if self._labels is None:
241
+ self._labels = self._get_ontology().labels
242
+ return self._labels
243
+
244
+ @property
245
+ def readonly(self):
246
+ return self._readonly
247
+
248
+ @property
249
+ def platform_url(self):
250
+ return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
251
+
252
+ @readonly.setter
253
+ def readonly(self, state):
254
+ import warnings
255
+ warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
256
+
257
+ @property
258
+ def labels_flat_dict(self):
259
+ return self._get_ontology().labels_flat_dict
260
+
261
+ @property
262
+ def instance_map(self) -> dict:
263
+ return self._get_ontology().instance_map
264
+
265
+ @instance_map.setter
266
+ def instance_map(self, value: dict):
267
+ """
268
+ instance mapping for creating instance mask
269
+
270
+ :param value: dictionary {label: map_id}
271
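+
+ **Example** (a minimal sketch; label names and ids are illustrative):
+
+ .. code-block:: python
+
+ dataset.instance_map = {'person': 1, 'car': 2}
+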
+ """
272
+ if not isinstance(value, dict):
273
+ raise ValueError('input must be a dictionary of {label_name: instance_id}')
274
+ self._get_ontology().instance_map = value
275
+
276
+ @property
277
+ def ontology_ids(self):
278
+ if self._ontology_ids is None:
279
+ self._ontology_ids = list()
280
+ if self.metadata is not None and 'system' in self.metadata and 'recipes' in self.metadata['system']:
281
+ recipe_ids = self.get_recipe_ids()
282
+ for rec_id in recipe_ids:
283
+ recipe = self.recipes.get(recipe_id=rec_id)
284
+ self._ontology_ids += recipe.ontology_ids
285
+ return self._ontology_ids
286
+
287
+ @_repositories.default
288
+ def set_repositories(self):
289
+ reps = namedtuple('repositories',
290
+ field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
291
+ 'ontologies', 'features', 'settings', 'schema', 'collections'])
292
+ if self._project is None:
293
+ datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
294
+ else:
295
+ datasets = self._project.datasets
296
+
297
+ return reps(
298
+ items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
299
+ recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
300
+ assignments=repositories.Assignments(project=self._project, client_api=self._client_api, dataset=self),
301
+ tasks=repositories.Tasks(client_api=self._client_api, project=self._project, dataset=self),
302
+ annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
303
+ datasets=datasets,
304
+ ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
305
+ features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
306
+ settings=repositories.Settings(client_api=self._client_api, dataset=self),
307
+ schema=repositories.Schema(client_api=self._client_api, dataset=self),
308
+ collections=repositories.Collections(client_api=self._client_api, dataset=self)
309
+ )
310
+
311
+ @property
312
+ def settings(self):
313
+ assert isinstance(self._repositories.settings, repositories.Settings)
314
+ return self._repositories.settings
315
+
316
+ @property
317
+ def items(self):
318
+ assert isinstance(self._repositories.items, repositories.Items)
319
+ return self._repositories.items
320
+
321
+ @property
322
+ def ontologies(self):
323
+ assert isinstance(self._repositories.ontologies, repositories.Ontologies)
324
+ return self._repositories.ontologies
325
+
326
+ @property
327
+ def recipes(self):
328
+ assert isinstance(self._repositories.recipes, repositories.Recipes)
329
+ return self._repositories.recipes
330
+
331
+ @property
332
+ def datasets(self):
333
+ assert isinstance(self._repositories.datasets, repositories.Datasets)
334
+ return self._repositories.datasets
335
+
336
+ @property
337
+ def assignments(self):
338
+ assert isinstance(self._repositories.assignments, repositories.Assignments)
339
+ return self._repositories.assignments
340
+
341
+ @property
342
+ def tasks(self):
343
+ assert isinstance(self._repositories.tasks, repositories.Tasks)
344
+ return self._repositories.tasks
345
+
346
+ @property
347
+ def annotations(self):
348
+ assert isinstance(self._repositories.annotations, repositories.Annotations)
349
+ return self._repositories.annotations
350
+
351
+ @property
352
+ def features(self):
353
+ assert isinstance(self._repositories.features, repositories.Features)
354
+ return self._repositories.features
355
+
356
+ @property
357
+ def collections(self):
358
+ assert isinstance(self._repositories.collections, repositories.Collections)
359
+ return self._repositories.collections
360
+
361
+ @property
362
+ def schema(self):
363
+ assert isinstance(self._repositories.schema, repositories.Schema)
364
+ return self._repositories.schema
365
+
366
+ @property
367
+ def project(self):
368
+ if self._project is None:
369
+ # get from cache
370
+ project = self._client_api.state_io.get('project')
371
+ if project is not None:
372
+ # build entity from json
373
+ p = entities.Project.from_json(_json=project, client_api=self._client_api)
374
+ # check if dataset belongs to project
375
+ if p.id in self.projects:
376
+ self._project = p
377
+ if self._project is None:
378
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.projects[0],
379
+ fetch=None)
380
+ assert isinstance(self._project, entities.Project)
381
+ return self._project
382
+
383
+ @project.setter
384
+ def project(self, project):
385
+ if not isinstance(project, entities.Project):
386
+ raise ValueError('Must input a valid Project entity')
387
+ self._project = project
388
+
389
+ @property
390
+ def directory_tree(self):
391
+ if self._directory_tree is None:
392
+ self._directory_tree = self.project.datasets.directory_tree(dataset_id=self.id)
393
+ assert isinstance(self._directory_tree, entities.DirectoryTree)
394
+ return self._directory_tree
395
+
396
+ def __copy__(self):
397
+ return Dataset.from_json(_json=self.to_json(),
398
+ project=self._project,
399
+ client_api=self._client_api,
400
+ is_fetched=self.is_fetched,
401
+ datasets=self.datasets)
402
+
403
+ def __get_local_path__(self):
404
+ if self._project is not None:
405
+ local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
406
+ 'projects',
407
+ self.project.name,
408
+ 'datasets',
409
+ self.name)
410
+ else:
411
+ local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
412
+ 'datasets',
413
+ '%s_%s' % (self.name, self.id))
414
+ return local_path
415
+
416
+ def _get_recipe(self):
417
+ recipes = self.recipes.list()
418
+ if len(recipes) > 0:
419
+ return recipes[0]
420
+ else:
421
+ raise exceptions.PlatformException('404', 'Dataset {} has no recipe'.format(self.name))
422
+
423
+ def _get_ontology(self):
424
+ if self._ontology is None:
425
+ ontologies = self._get_recipe().ontologies.list()
426
+ if len(ontologies) > 0:
427
+ self._ontology = ontologies[0]
428
+ else:
429
+ raise exceptions.PlatformException('404', 'Dataset {} has no ontology'.format(self.name))
430
+ return self._ontology
431
+
432
+ @staticmethod
433
+ def serialize_labels(labels_dict):
434
+ """
435
+ Convert RGB color tuples to hex color format
436
+
437
+ :param dict labels_dict: dict of labels mapped to RGB color tuples, e.g. {'label': (r, g, b)}
438
+ :return: dict of labels mapped to hex color strings, e.g. {'label': '#rrggbb'}
439
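+
+ **Example** (a minimal sketch; the color is an illustrative RGB tuple):
+
+ .. code-block:: python
+
+ dataset.serialize_labels({'person': (255, 0, 0)}) # -> {'person': '#ff0000'}
+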
+ """
440
+ dataset_labels_dict = dict()
441
+ for label, color in labels_dict.items():
442
+ dataset_labels_dict[label] = '#%02x%02x%02x' % color
443
+ return dataset_labels_dict
444
+
445
+ def get_recipe_ids(self):
446
+ """
447
+ Get dataset recipe Ids
448
+
449
+ :return: list of recipe ids
450
+ :rtype: list
451
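+
+ **Example** (a minimal sketch, following the recipe/ontology lookup pattern used elsewhere in this class):
+
+ .. code-block:: python
+
+ recipe_id = dataset.get_recipe_ids()[0]
+ recipe = dataset.recipes.get(recipe_id=recipe_id)
+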
+ """
452
+ return self.metadata['system']['recipes']
453
+
454
+ def switch_recipe(self, recipe_id=None, recipe=None):
455
+ """
456
+ Switch the recipe linked to the dataset with the given one
457
+
458
+ :param str recipe_id: recipe id
459
+ :param dtlpy.entities.recipe.Recipe recipe: recipe entity
460
+
461
+ **Example**:
462
+
463
+ .. code-block:: python
464
+
465
+ dataset.switch_recipe(recipe_id='recipe_id')
466
+ """
467
+ if recipe is None and recipe_id is None:
468
+ raise exceptions.PlatformException('400', 'Must provide recipe or recipe_id')
469
+ if recipe_id is None:
470
+ if not isinstance(recipe, entities.Recipe):
471
+ raise exceptions.PlatformException('400', 'Recipe must be of entities.Recipe type')
472
+ else:
473
+ recipe_id = recipe.id
474
+
475
+ # add recipe id to dataset metadata
476
+ if 'system' not in self.metadata:
477
+ self.metadata['system'] = dict()
478
+ if 'recipes' not in self.metadata['system']:
479
+ self.metadata['system']['recipes'] = list()
480
+ self.metadata['system']['recipes'] = [recipe_id]
481
+ self.update(system_metadata=True)
482
+
483
+ def delete(self, sure=False, really=False):
484
+ """
485
+ Delete a dataset forever!
486
+
487
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
488
+
489
+ :param bool sure: are you sure you want to delete?
490
+ :param bool really: really really?
491
+ :return: True if success
492
+ :rtype: bool
493
+
494
+ **Example**:
495
+
496
+ .. code-block:: python
497
+
498
+ is_deleted = dataset.delete(sure=True, really=True)
499
+ """
500
+ return self.datasets.delete(dataset_id=self.id,
501
+ sure=sure,
502
+ really=really)
503
+
504
+ def update(self, system_metadata=False):
505
+ """
506
+ Update dataset field
507
+
508
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
509
+
510
+ :param bool system_metadata: bool - True, if you want to update the system metadata as well
511
+ :return: Dataset object
512
+ :rtype: dtlpy.entities.dataset.Dataset
513
+
514
+ **Example**:
515
+
516
+ .. code-block:: python
517
+
518
+ dataset = dataset.update()
519
+ """
520
+ return self.datasets.update(dataset=self,
521
+ system_metadata=system_metadata)
522
+
523
+ def unlock(self):
524
+ """
525
+ Unlock dataset
526
+
527
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
528
+
529
+ :return: Dataset object
530
+ :rtype: dtlpy.entities.dataset.Dataset
531
+
532
+ **Example**:
533
+
534
+ .. code-block:: python
535
+
536
+ dataset = dataset.unlock()
537
+ """
538
+ return self.datasets.unlock(dataset=self)
539
+
540
+ def set_readonly(self, state: bool):
541
+ """
542
+ Set dataset readonly mode
543
+
544
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
545
+
546
+ :param bool state: state
547
+
548
+ **Example**:
549
+
550
+ .. code-block:: python
551
+
552
+ dataset.set_readonly(state=True)
553
+ """
554
+ import warnings
555
+ warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
556
+
557
+ def clone(self,
558
+ clone_name=None,
559
+ filters=None,
560
+ with_items_annotations=True,
561
+ with_metadata=True,
562
+ with_task_annotations_status=True,
563
+ dst_dataset_id=None,
564
+ target_directory=None,
565
+ ):
566
+ """
567
+ Clone dataset
568
+
569
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
570
+
571
+ :param str clone_name: new dataset name
572
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a query dict
573
+ :param bool with_items_annotations: clone all item's annotations
574
+ :param bool with_metadata: clone metadata
575
+ :param bool with_task_annotations_status: clone task annotations status
576
+ :param str dst_dataset_id: destination dataset id
577
+ :param str target_directory: target directory
578
+ :return: dataset object
579
+ :rtype: dtlpy.entities.dataset.Dataset
580
+
581
+ **Example**:
582
+
583
+ .. code-block:: python
584
+
585
+ dataset = dataset.clone(clone_name='dataset_clone_name',
587
+ with_metadata=True,
588
+ with_items_annotations=False,
589
+ with_task_annotations_status=False)
590
+ """
591
+ return self.datasets.clone(dataset_id=self.id,
592
+ filters=filters,
593
+ clone_name=clone_name,
594
+ with_metadata=with_metadata,
595
+ with_items_annotations=with_items_annotations,
596
+ with_task_annotations_status=with_task_annotations_status,
597
+ dst_dataset_id=dst_dataset_id,
598
+ target_directory=target_directory)
599
+
600
+ def sync(self, wait=True):
601
+ """
602
+ Sync dataset with external storage
603
+
604
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
605
+
606
+ :param bool wait: wait for the command to finish
607
+ :return: True if success
608
+ :rtype: bool
609
+
610
+ **Example**:
611
+
612
+ .. code-block:: python
613
+
614
+ success = dataset.sync()
615
+ """
616
+ return self.datasets.sync(dataset_id=self.id, wait=wait)
617
+
618
+ def download_annotations(self,
619
+ local_path=None,
620
+ filters=None,
621
+ annotation_options: ViewAnnotationOptions = None,
622
+ annotation_filters=None,
623
+ overwrite=False,
624
+ thickness=1,
625
+ with_text=False,
626
+ remote_path=None,
627
+ include_annotations_in_output=True,
628
+ export_png_files=False,
629
+ filter_output_annotations=False,
630
+ alpha=1,
631
+ export_version=ExportVersion.V1,
632
+ dataset_lock=False,
633
+ lock_timeout_sec=None,
634
+ export_summary=False,
635
+ ):
636
+ """
637
+ Download dataset annotations by filters.
638
+ Filters the dataset for items and saves their annotations locally.
639
+ Optionally also downloads the annotation, mask, instance, and image mask of each item.
640
+
641
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
642
+
643
+ :param str local_path: local folder or filename to save to.
644
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
645
+ :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
646
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
647
+ :param bool overwrite: optional - default = False
648
+ :param bool dataset_lock: optional - default = False
649
+ :param bool export_summary: optional - default = False
650
+ :param int lock_timeout_sec: optional
651
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
652
+ :param bool with_text: optional - add text to annotations, default = False
653
+ :param str remote_path: DEPRECATED and ignored
654
+ :param bool include_annotations_in_output: default - True; whether the export should contain annotations
655
+ :param bool export_png_files: default - False; if True, semantic annotations will also be exported as PNG files
656
+ :param bool filter_output_annotations: default - False; when exporting with a filter, determines whether the output annotations are filtered as well
657
+ :param float alpha: opacity value [0 1], default 1
658
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
659
+ :return: local_path of the directory where all the items were downloaded
660
+ :rtype: str
661
+
662
+ **Example**:
663
+
664
+ .. code-block:: python
665
+
666
+ local_path = dataset.download_annotations(local_path='local_path',
668
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
669
+ overwrite=False,
670
+ thickness=1,
671
+ with_text=False,
672
+ alpha=1,
673
+ dataset_lock=False,
674
+ lock_timeout_sec=300,
675
+ export_summary=False
676
+ )
677
+ """
678
+
679
+ return self.datasets.download_annotations(
680
+ dataset=self,
681
+ local_path=local_path,
682
+ overwrite=overwrite,
683
+ filters=filters,
684
+ annotation_options=annotation_options,
685
+ annotation_filters=annotation_filters,
686
+ thickness=thickness,
687
+ with_text=with_text,
688
+ remote_path=remote_path,
689
+ include_annotations_in_output=include_annotations_in_output,
690
+ export_png_files=export_png_files,
691
+ filter_output_annotations=filter_output_annotations,
692
+ alpha=alpha,
693
+ export_version=export_version,
694
+ dataset_lock=dataset_lock,
695
+ lock_timeout_sec=lock_timeout_sec,
696
+ export_summary=export_summary
697
+ )
698
+
699
+ def export(self,
700
+ local_path=None,
701
+ filters=None,
702
+ annotation_filters=None,
703
+ feature_vector_filters=None,
704
+ include_feature_vectors: bool = False,
705
+ include_annotations: bool = False,
706
+ export_type: ExportType = ExportType.JSON,
707
+ timeout: int = 0,
708
+ dataset_lock: bool = False,
709
+ lock_timeout_sec: int = None,
710
+ export_summary: bool = False,
711
+ output_export_type: OutputExportType = None):
712
+ """
713
+ Export dataset items and annotations.
714
+
715
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
716
+
719
+ :param str local_path: The local path to save the exported dataset
720
+ :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
721
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
722
+ :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
723
+ :param bool include_feature_vectors: Include item feature vectors in the export
724
+ :param bool include_annotations: Include item annotations in the export
725
+ :param bool dataset_lock: Make dataset readonly during the export
726
+ :param bool export_summary: Download dataset export summary
727
+ :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
728
+ :param entities.ExportType export_type: Type of export ('json' or 'zip')
729
+ :param entities.OutputExportType output_export_type: Output format ('json', 'zip', or 'folders'). If None, defaults to 'json'
730
+ :param int timeout: Maximum time in seconds to wait for the export to complete
731
+ :return: Exported item
732
+ :rtype: dtlpy.entities.item.Item
733
+
734
+ **Example**:
735
+
736
+ .. code-block:: python
737
+
738
+ export_item = dataset.export(filters=filters,
739
+ include_feature_vectors=True,
740
+ include_annotations=True,
741
+ export_type=dl.ExportType.JSON,
742
+ output_export_type=dl.OutputExportType.JSON)
743
+ """
744
+
745
+ return self.datasets.export(dataset=self,
746
+ local_path=local_path,
747
+ filters=filters,
748
+ annotation_filters=annotation_filters,
749
+ feature_vector_filters=feature_vector_filters,
750
+ include_feature_vectors=include_feature_vectors,
751
+ include_annotations=include_annotations,
752
+ export_type=export_type,
753
+ timeout=timeout,
754
+ dataset_lock=dataset_lock,
755
+ lock_timeout_sec=lock_timeout_sec,
756
+ export_summary=export_summary,
757
+ output_export_type=output_export_type)
758
+
759
+ def upload_annotations(self,
760
+ local_path,
761
+ filters=None,
762
+ clean=False,
763
+ remote_root_path='/',
764
+ export_version=ExportVersion.V1
765
+ ):
766
+ """
767
+ Upload annotations to dataset.
768
+
769
+ **Prerequisites**: You must have a dataset with items that match the annotation files; items and annotations are matched by name. You must be in the role of an *owner* or *developer*.
770
+
771
+ :param str local_path: str - local folder where the annotation files are
772
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
773
+ :param bool clean: bool - if True, removes the old annotations
774
+ :param str remote_root_path: str - the remote root path to match remote and local items
775
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
776
+
777
+ For example, if the item filepath is a/b/item and remote_root_path is /a, the start folder will be b instead of a
778
+
779
+ **Example**:
780
+
781
+ .. code-block:: python
782
+
783
+ dataset.upload_annotations(local_path='local_path',
785
+ clean=False,
786
+ export_version=dl.ExportVersion.V1
787
+ )
788
+ """
789
+
790
+ return self.datasets.upload_annotations(
791
+ dataset=self,
792
+ local_path=local_path,
793
+ filters=filters,
794
+ clean=clean,
795
+ remote_root_path=remote_root_path,
796
+ export_version=export_version
797
+ )
798
+
799
+ def checkout(self):
800
+ """
801
+ Checkout the dataset
802
+
803
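+ **Example** (a minimal sketch; checks this dataset out as the current working dataset for subsequent calls):
+
+ .. code-block:: python
+
+ dataset.checkout()
+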
+ """
804
+ self.datasets.checkout(dataset=self)
805
+
806
+ def open_in_web(self):
807
+ """
808
+ Open the dataset in web platform
809
+
810
+ """
811
+ self._client_api._open_in_web(url=self.platform_url)
812
+
813
+ def add_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
814
+ recipe_id=None, ontology_id=None, icon_path=None):
815
+ """
816
+ Add single label to dataset
817
+
818
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
819
+
820
+ :param str label_name: str - label name
821
+ :param tuple color: RGB color of the annotation, e.g. (255, 0, 0) or '#ff0000' for red
822
+ :param children: children (sub-labels); a list of sub-labels of this label, each value either a dict or a dl.Label
823
+ :param list attributes: add attributes to the labels
824
+ :param str display_label: name that display label
825
+ :param dtlpy.entities.label.Label label: label object
826
+ :param str recipe_id: optional recipe id
827
+ :param str ontology_id: optional ontology id
828
+ :param str icon_path: path to an image to be displayed on the label
829
+ :return: label entity
830
+ :rtype: dtlpy.entities.label.Label
831
+
832
+ **Example**:
833
+
834
+ .. code-block:: python
835
+
836
+ dataset.add_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
837
+ """
838
+ # get recipe
839
+ if recipe_id is None:
840
+ recipe_id = self.get_recipe_ids()[0]
841
+ recipe = self.recipes.get(recipe_id=recipe_id)
842
+
843
+ # get ontology
844
+ if ontology_id is None:
845
+ ontology_id = recipe.ontology_ids[0]
846
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
847
+ # ontology._dataset = self
848
+
849
+ # add label
850
+ added_label = ontology.add_label(label_name=label_name,
851
+ color=color,
852
+ children=children,
853
+ attributes=attributes,
854
+ display_label=display_label,
855
+ label=label,
856
+ update_ontology=True,
857
+ icon_path=icon_path)
858
+
859
+ return added_label
860
+
861
+ def add_labels(self, label_list, ontology_id=None, recipe_id=None):
862
+ """
863
+ Add labels to dataset
864
+
865
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
866
+
867
+ :param list label_list: a list of labels to add to the dataset's ontology. each value should be a dict, dl.Label or a string
868
+ :param str ontology_id: optional ontology id
869
+ :param str recipe_id: optional recipe id
870
+ :return: label entities
871
+
872
+ **Example**:
873
+
874
+ .. code-block:: python
875
+
876
+ dataset.add_labels(label_list=label_list)
877
+ """
878
+ # get recipe
879
+ if recipe_id is None:
880
+ recipe_id = self.get_recipe_ids()[0]
881
+ recipe = self.recipes.get(recipe_id=recipe_id)
882
+
883
+ # get ontology
884
+ if ontology_id is None:
885
+ ontology_id = recipe.ontology_ids[0]
886
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
887
+
888
+ # add labels to ontology
889
+ added_labels = ontology.add_labels(label_list=label_list, update_ontology=True)
890
+
891
+ return added_labels
892
+
893
+ def update_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
894
+ recipe_id=None, ontology_id=None, upsert=False, icon_path=None):
895
+ """
896
+ Update a single label in the dataset, or add it if it does not exist (when upsert=True)
897
+
898
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
899
+
900
+ :param str label_name: str - label name
901
+ :param tuple color: color
902
+ :param children: children (sub labels)
903
+ :param list attributes: add attributes to the labels
904
+ :param str display_label: name that display label
905
+ :param dtlpy.entities.label.Label label: label
906
+ :param str recipe_id: optional recipe id
907
+ :param str ontology_id: optional ontology id
908
+ :param bool upsert: if True, will add the label in case it does not exist
+ :param str icon_path: path to an image to be displayed on the label
909
+
910
+ :return: label entity
911
+ :rtype: dtlpy.entities.label.Label
912
+
913
+ **Example**:
914
+
915
+ .. code-block:: python
916
+
917
+ dataset.update_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
918
+ """
919
+ # get recipe
920
+
921
+ if recipe_id is None:
922
+ recipe_id = self.get_recipe_ids()[0]
923
+ recipe = self.recipes.get(recipe_id=recipe_id)
924
+
925
+ # get ontology
926
+ if ontology_id is None:
927
+ ontology_id = recipe.ontology_ids[0]
928
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
929
+
930
+ # add label
931
+ added_label = ontology.update_label(label_name=label_name,
932
+ color=color,
933
+ children=children,
934
+ attributes=attributes,
935
+ display_label=display_label,
936
+ label=label,
937
+ update_ontology=True,
938
+ upsert=upsert,
939
+ icon_path=icon_path)
940
+
941
+ return added_label
942
+
943
+ def update_labels(self, label_list, ontology_id=None, recipe_id=None, upsert=False):
944
+ """
945
+ Update labels in the dataset, or add them if they do not exist (when upsert=True)
946
+
947
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
948
+
949
+ :param list label_list: label list
950
+ :param str ontology_id: optional ontology id
951
+ :param str recipe_id: optional recipe id
952
+ :param bool upsert: if True, will add the label in case it does not exist
953
+
954
+ :return: label entities
955
+ :rtype: dtlpy.entities.label.Label
956
+
957
+ **Example**:
958
+
959
+ .. code-block:: python
960
+
961
+ dataset.update_labels(label_list=label_list)
962
+ """
963
+ # get recipe
964
+ if recipe_id is None:
965
+ recipe_id = self.get_recipe_ids()[0]
966
+ recipe = self.recipes.get(recipe_id=recipe_id)
967
+
968
+ # get ontology
969
+ if ontology_id is None:
970
+ ontology_id = recipe.ontology_ids[0]
971
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
972
+
973
+ # add labels to ontology
974
+ added_labels = ontology.update_labels(label_list=label_list, update_ontology=True, upsert=upsert)
975
+
976
+ return added_labels
977
+
978
+ def download(
979
+ self,
980
+ filters=None,
981
+ local_path=None,
982
+ file_types=None,
983
+ annotation_options: ViewAnnotationOptions = None,
984
+ annotation_filters=None,
985
+ overwrite=False,
986
+ to_items_folder=True,
987
+ thickness=1,
988
+ with_text=False,
989
+ without_relative_path=None,
990
+ alpha=1,
991
+ export_version=ExportVersion.V1,
992
+ dataset_lock=False,
993
+ lock_timeout_sec=None,
994
+ export_summary=False,
995
+ raise_on_error=False
996
+ ):
997
+ """
998
+ Download dataset by filters.
999
+ Filters the dataset for items and saves them locally.
1000
+ Optionally also downloads the annotation, mask, instance, and image mask of each item.
1001
+
1002
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
1003
+
1004
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
1005
+ :param str local_path: local folder or filename to save to.
1006
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
1007
+ :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
1008
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1009
+ :param bool overwrite: optional - default = False to overwrite the existing files
1010
+ :param bool dataset_lock: optional - default = False to make dataset readonly during the download
1011
+ :param bool export_summary: optional - default = False to get the summary of the export
1012
+ :param int lock_timeout_sec: optional - Set lock timeout for the export
1013
+ :param bool to_items_folder: Create 'items' folder and download items to it
1014
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1015
+ :param bool with_text: optional - add text to annotations, default = False
1016
+ :param bool without_relative_path: bool - download items without the relative path from platform
1017
+ :param float alpha: opacity value [0 1], default 1
1018
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1019
+ :param bool raise_on_error: raise an exception if an error occurs
1020
+ :return: `List` of local paths, one for each downloaded item
1021
+
1022
+ **Example**:
1023
+
1024
+ .. code-block:: python
1025
+
1026
+ dataset.download(local_path='local_path',
1027
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1028
+ overwrite=False,
1029
+ thickness=1,
1030
+ with_text=False,
1031
+ alpha=1,
1032
+ dataset_lock=False,
1033
+ lock_timeout_sec=300,
1034
+ export_summary=False
1035
+ )
1036
+ """
1037
+ return self.items.download(filters=filters,
1038
+ local_path=local_path,
1039
+ file_types=file_types,
1040
+ annotation_options=annotation_options,
1041
+ annotation_filters=annotation_filters,
1042
+ overwrite=overwrite,
1043
+ to_items_folder=to_items_folder,
1044
+ thickness=thickness,
1045
+ with_text=with_text,
1046
+ without_relative_path=without_relative_path,
1047
+ alpha=alpha,
1048
+ export_version=export_version,
1049
+ dataset_lock=dataset_lock,
1050
+ lock_timeout_sec=lock_timeout_sec,
1051
+ export_summary=export_summary,
1052
+ raise_on_error=raise_on_error
1053
+ )
1054
+
1055
+ def download_folder(
1056
+ self,
1057
+ folder_path,
1058
+ filters=None,
1059
+ local_path=None,
1060
+ file_types=None,
1061
+ annotation_options: ViewAnnotationOptions = None,
1062
+ annotation_filters=None,
1063
+ overwrite=False,
1064
+ to_items_folder=True,
1065
+ thickness=1,
1066
+ with_text=False,
1067
+ without_relative_path=None,
1068
+ alpha=1,
1069
+ export_version=ExportVersion.V1,
1070
+ dataset_lock=False,
1071
+ lock_timeout_sec=None,
1072
+ export_summary=False,
1073
+ raise_on_error=False
1074
+ ):
1075
+ """
1076
+ Download a dataset folder.
1077
+ Optionally also downloads the annotation, mask, instance, and image mask of each item.
1078
+
1079
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
1080
+
1081
+ :param str folder_path: the path of the folder to download
1082
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
1083
+ :param str local_path: local folder or filename to save to.
1084
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
1085
+ :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
1086
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1087
+ :param bool overwrite: optional - default = False to overwrite the existing files
1088
+ :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
1089
+ :param bool export_summary: optional - default = False to get the summary of the export
1090
+ :param int lock_timeout_sec: optional - Set lock timeout for the export
1091
+ :param bool to_items_folder: Create 'items' folder and download items to it
1092
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1093
+ :param bool with_text: optional - add text to annotations, default = False
1094
+ :param bool without_relative_path: bool - download items without the relative path from platform
1095
+ :param float alpha: opacity value [0 1], default 1
1096
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1097
+ :param bool raise_on_error: raise an exception if an error occurs
1098
+ :return: `List` of local paths, one for each downloaded item
1099
+
1100
+ **Example**:
1101
+
1102
+ .. code-block:: python
1103
+
1104
+ dataset.download_folder(folder_path='folder_path',
1105
+ local_path='local_path',
1106
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1107
+ overwrite=False,
1108
+ thickness=1,
1109
+ with_text=False,
1110
+ alpha=1,
1111
+ dataset_lock=False,
1113
+ lock_timeout_sec=300,
1114
+ export_summary=False
1115
+ )
1116
+ """
1117
+ filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
1118
+ return self.items.download(filters=filters,
1119
+ local_path=local_path,
1120
+ file_types=file_types,
1121
+ annotation_options=annotation_options,
1122
+ annotation_filters=annotation_filters,
1123
+ overwrite=overwrite,
1124
+ to_items_folder=to_items_folder,
1125
+ thickness=thickness,
1126
+ with_text=with_text,
1127
+ without_relative_path=without_relative_path,
1128
+ alpha=alpha,
1129
+ export_version=export_version,
1130
+ dataset_lock=dataset_lock,
1131
+ lock_timeout_sec=lock_timeout_sec,
1132
+ export_summary=export_summary,
1133
+ raise_on_error=raise_on_error
1134
+ )
1135
+
1136
+ def delete_labels(self, label_names):
1137
+ """
1138
+ Delete labels from dataset's ontologies
1139
+
1140
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
1141
+
1142
+ :param label_names: label object/ label name / list of label objects / list of label names
1143
+
1144
+ **Example**:
1145
+
1146
+ .. code-block:: python
1147
+
1148
+ dataset.delete_labels(label_names=['myLabel1', 'Mylabel2'])
1149
+ """
1150
+ for recipe in self.recipes.list():
1151
+ for ontology in recipe.ontologies.list():
1152
+ ontology.delete_labels(label_names=label_names)
1153
+ self._labels = None
1154
+
1155
+ def update_attributes(self,
1156
+ title: str,
1157
+ key: str,
1158
+ attribute_type,
1159
+ recipe_id: str = None,
1160
+ ontology_id: str = None,
1161
+ scope: list = None,
1162
+ optional: bool = None,
1163
+ values: list = None,
1164
+ attribute_range=None):
1165
+ """
1166
+ Add a new attribute, or update it if it already exists
1167
+
1168
+ :param str ontology_id: ontology_id
1169
+ :param str title: attribute title
1170
+ :param str key: the key of the attribute, must be unique
1171
+ :param AttributesTypes attribute_type: dl.AttributesTypes your attribute type
1172
+ :param list scope: list of the labels, or * for all labels
1173
+ :param bool optional: optional attribute
1174
+ :param list values: list of the attribute values (for checkbox and radio button)
1175
+ :param dict or AttributesRange attribute_range: dl.AttributesRange object
1176
+ :return: True if success
1177
+ :rtype: bool
1178
+
1179
+ **Example**:
1180
+
1181
+ .. code-block:: python
1182
+
1183
+ dataset.update_attributes(ontology_id='ontology_id',
1184
+ key='1',
1185
+ title='checkbox',
1186
+ attribute_type=dl.AttributesTypes.CHECKBOX,
1187
+ values=[1,2,3])
1188
+ """
1189
+ # get recipe
1190
+ if recipe_id is None:
1191
+ recipe_id = self.get_recipe_ids()[0]
1192
+ recipe = self.recipes.get(recipe_id=recipe_id)
1193
+
1194
+ # get ontology
1195
+ if ontology_id is None:
1196
+ ontology_id = recipe.ontology_ids[0]
1197
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
1198
+
1199
+ # add attribute to ontology
1200
+ attribute = ontology.update_attributes(
1201
+ title=title,
1202
+ key=key,
1203
+ attribute_type=attribute_type,
1204
+ scope=scope,
1205
+ optional=optional,
1206
+ values=values,
1207
+ attribute_range=attribute_range)
1208
+
1209
+ return attribute
1210
+
1211
+ def delete_attributes(self, keys: list,
1212
+ recipe_id: str = None,
1213
+ ontology_id: str = None):
1214
+ """
1215
+ Delete a bulk of attributes
1216
+
1217
+ :param str recipe_id: recipe id
1218
+ :param str ontology_id: ontology id
1219
+ :param list keys: Keys of attributes to delete
1220
+ :return: True if success
1221
+ :rtype: bool
1222
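+
+ **Example** (a minimal sketch; the key is illustrative and must match an existing attribute key):
+
+ .. code-block:: python
+
+ success = dataset.delete_attributes(keys=['1'])
+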
+ """
1223
+
1224
+ # get recipe
1225
+ if recipe_id is None:
1226
+ recipe_id = self.get_recipe_ids()[0]
1227
+ recipe = self.recipes.get(recipe_id=recipe_id)
1228
+
1229
+ # get ontology
1230
+ if ontology_id is None:
1231
+ ontology_id = recipe.ontology_ids[0]
1232
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
1233
+ return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)
1234
+
1235
+ def split_ml_subsets(self,
1236
+ items_query=None,
1237
+ percentages: dict = None):
1238
+ """
1239
+ Split dataset items into ML subsets.
1240
+
1241
+ :param dl.Filters items_query: Filters object to select items.
1242
+ :param dict percentages: {'train': x, 'validation': y, 'test': z}.
1243
+ :return: True if the split operation was successful.
1244
+ :rtype: bool
1245
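+
+ **Example** (a minimal sketch; assumes the percentages are whole numbers summing to 100):
+
+ .. code-block:: python
+
+ success = dataset.split_ml_subsets(percentages={'train': 60, 'validation': 20, 'test': 20})
+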
+ """
1246
+ return self.datasets.split_ml_subsets(dataset_id=self.id,
1247
+ items_query=items_query,
1248
+ ml_split_list=percentages)
1249
+
1250
+ def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
1251
+ """
1252
+ Assign a specific ML subset (train/validation/test) to items defined by the given filters.
1253
+ This will set the chosen subset to True and the others to None.
1254
+
1255
+ :param dl.Filters items_query: Filters to select items
1256
+ :param str subset: 'train', 'validation', or 'test'
1257
+ :return: True if successful
1258
+ :rtype: bool
1259
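+
+ **Example** (a minimal sketch; the filter is illustrative):
+
+ .. code-block:: python
+
+ filters = dl.Filters(field='dir', values='/train')
+ success = dataset.assign_subset_to_items(subset='train', items_query=filters)
+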
+ """
1260
+
1261
+ return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
1262
+ items_query=items_query,
1263
+ subset=subset)
1264
+
1265
+ def remove_subset_from_items(self, items_query=None) -> bool:
1266
+ """
1267
+ Remove any ML subset assignment from items defined by the given filters.
1268
+ This sets train, validation, and test tags to None.
1269
+
1270
+ :param dl.Filters items_query: Filters to select items
1271
+ :return: True if successful
1272
+ :rtype: bool
1273
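+
+ **Example** (a minimal sketch; the filter is illustrative):
+
+ .. code-block:: python
+
+ success = dataset.remove_subset_from_items(items_query=dl.Filters(field='dir', values='/train'))
+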
+ """
1274
+ return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
1275
+ items_query=items_query,
1276
+ subset=None,
1277
+ deleteTag=True)
1278
+
1279
+ def get_items_missing_ml_subset(self, filters=None) -> list:
1280
+ """
1281
+ Get the list of item IDs that are missing ML subset assignment.
1282
+ An item is considered missing an ML subset if none of its train, validation, or test tags is True (all are None).
1283
+
1284
+ :param dl.Filters filters: optional filters to narrow down items. If None, will use a default filter for files.
1285
+ :return: list of item IDs
1286
+ :rtype: list
1287
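+
+ **Example** (a minimal sketch):
+
+ .. code-block:: python
+
+ missing_ids = dataset.get_items_missing_ml_subset()
+ print('items without an ML subset:', len(missing_ids))
+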
+ """
1288
+ if filters is None:
1289
+ filters = entities.Filters()
1290
+ filters.add(field='metadata.system.tags.train', values=None)
1291
+ filters.add(field='metadata.system.tags.validation', values=None)
1292
+ filters.add(field='metadata.system.tags.test', values=None)
1293
+ missing_ids = []
1294
+ pages = self.items.list(filters=filters)
1295
+ for page in pages:
1296
+ for item in page:
1297
+ # an item that passes the filters has no subset assigned
1298
+ missing_ids.append(item.id)
1299
+ return missing_ids