dtlpy-1.113.10-py3-none-any.whl → dtlpy-1.114.13-py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (243)
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
dtlpy/entities/dataset.py CHANGED
@@ -1,1285 +1,1285 @@
1
- from collections import namedtuple
2
- import traceback
3
- import logging
4
- from enum import Enum
5
-
6
- import attr
7
- import os
8
-
9
- from .. import repositories, entities, services, exceptions
10
- from ..services.api_client import ApiClient
11
- from .annotation import ViewAnnotationOptions, AnnotationType, ExportVersion
12
-
13
- logger = logging.getLogger(name='dtlpy')
14
-
15
-
16
- class IndexDriver(str, Enum):
17
- V1 = "v1"
18
- V2 = "v2"
19
-
20
-
21
- class ExportType(str, Enum):
22
- JSON = "json"
23
- ZIP = "zip"
24
-
25
-
26
- class ExpirationOptions:
27
- """
28
- ExpirationOptions object
29
- """
30
-
31
- def __init__(self, item_max_days: int = None):
32
- """
33
- :param item_max_days: int. items in dataset will be auto delete after this number id days
34
- """
35
- self.item_max_days = item_max_days
36
-
37
- def to_json(self):
38
- _json = dict()
39
- if self.item_max_days is not None:
40
- _json["itemMaxDays"] = self.item_max_days
41
- return _json
42
-
43
- @classmethod
44
- def from_json(cls, _json: dict):
45
- item_max_days = _json.get('itemMaxDays', None)
46
- if item_max_days:
47
- return cls(item_max_days=item_max_days)
48
- return None
49
-
50
-
51
- @attr.s
52
- class Dataset(entities.BaseEntity):
53
- """
54
- Dataset object
55
- """
56
- # dataset information
57
- id = attr.ib()
58
- url = attr.ib()
59
- name = attr.ib()
60
- annotated = attr.ib(repr=False)
61
- creator = attr.ib()
62
- projects = attr.ib(repr=False)
63
- items_count = attr.ib()
64
- metadata = attr.ib(repr=False)
65
- directoryTree = attr.ib(repr=False)
66
- expiration_options = attr.ib()
67
- index_driver = attr.ib()
68
- enable_sync_with_cloned = attr.ib(repr=False)
69
-
70
- # name change when to_json
71
- created_at = attr.ib()
72
- updated_at = attr.ib()
73
- updated_by = attr.ib()
74
- items_url = attr.ib(repr=False)
75
- readable_type = attr.ib(repr=False)
76
- access_level = attr.ib(repr=False)
77
- driver = attr.ib(repr=False)
78
- src_dataset = attr.ib(repr=False)
79
- _readonly = attr.ib(repr=False)
80
- annotations_count = attr.ib()
81
-
82
- # api
83
- _client_api = attr.ib(type=ApiClient, repr=False)
84
-
85
- # entities
86
- _project = attr.ib(default=None, repr=False)
87
-
88
- # repositories
89
- _datasets = attr.ib(repr=False, default=None)
90
- _repositories = attr.ib(repr=False)
91
-
92
- # defaults
93
- _ontology_ids = attr.ib(default=None, repr=False)
94
- _labels = attr.ib(default=None, repr=False)
95
- _directory_tree = attr.ib(default=None, repr=False)
96
- _recipe = attr.ib(default=None, repr=False)
97
- _ontology = attr.ib(default=None, repr=False)
98
-
99
- @property
100
- def itemsCount(self):
101
- return self.items_count
102
-
103
- @staticmethod
104
- def _protected_from_json(project: entities.Project,
105
- _json: dict,
106
- client_api: ApiClient,
107
- datasets=None,
108
- is_fetched=True):
109
- """
110
- Same as from_json but with try-except to catch if error
111
-
112
- :param project: dataset's project
113
- :param _json: _json response from host
114
- :param client_api: ApiClient entity
115
- :param datasets: Datasets repository
116
- :param is_fetched: is Entity fetched from Platform
117
- :return: Dataset object
118
- """
119
- try:
120
- dataset = Dataset.from_json(project=project,
121
- _json=_json,
122
- client_api=client_api,
123
- datasets=datasets,
124
- is_fetched=is_fetched)
125
- status = True
126
- except Exception:
127
- dataset = traceback.format_exc()
128
- status = False
129
- return status, dataset
130
-
131
- @classmethod
132
- def from_json(cls,
133
- project: entities.Project,
134
- _json: dict,
135
- client_api: ApiClient,
136
- datasets=None,
137
- is_fetched=True):
138
- """
139
- Build a Dataset entity object from a json
140
-
141
- :param project: dataset's project
142
- :param dict _json: _json response from host
143
- :param client_api: ApiClient entity
144
- :param datasets: Datasets repository
145
- :param bool is_fetched: is Entity fetched from Platform
146
- :return: Dataset object
147
- :rtype: dtlpy.entities.dataset.Dataset
148
- """
149
- projects = _json.get('projects', None)
150
- if project is not None and projects is not None:
151
- if project.id not in projects:
152
- logger.warning('Dataset has been fetched from a project that is not in it projects list')
153
- project = None
154
-
155
- expiration_options = _json.get('expirationOptions', None)
156
- if expiration_options:
157
- expiration_options = ExpirationOptions.from_json(expiration_options)
158
- inst = cls(metadata=_json.get('metadata', None),
159
- directoryTree=_json.get('directoryTree', None),
160
- readable_type=_json.get('readableType', None),
161
- access_level=_json.get('accessLevel', None),
162
- created_at=_json.get('createdAt', None),
163
- updated_at=_json.get('updatedAt', None),
164
- updated_by=_json.get('updatedBy', None),
165
- annotations_count=_json.get("annotationsCount", None),
166
- items_count=_json.get('itemsCount', None),
167
- annotated=_json.get('annotated', None),
168
- readonly=_json.get('readonly', None),
169
- projects=projects,
170
- creator=_json.get('creator', None),
171
- items_url=_json.get('items', None),
172
- driver=_json.get('driver', None),
173
- name=_json.get('name', None),
174
- url=_json.get('url', None),
175
- id=_json.get('id', None),
176
- datasets=datasets,
177
- client_api=client_api,
178
- project=project,
179
- expiration_options=expiration_options,
180
- index_driver=_json.get('indexDriver', None),
181
- enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
182
- src_dataset=_json.get('srcDataset', None))
183
- inst.is_fetched = is_fetched
184
- return inst
185
-
186
- def to_json(self):
187
- """
188
- Returns platform _json format of object
189
-
190
- :return: platform json format of object
191
- :rtype: dict
192
- """
193
- _json = attr.asdict(self, filter=attr.filters.exclude(attr.fields(Dataset)._client_api,
194
- attr.fields(Dataset)._project,
195
- attr.fields(Dataset)._readonly,
196
- attr.fields(Dataset)._datasets,
197
- attr.fields(Dataset)._repositories,
198
- attr.fields(Dataset)._ontology_ids,
199
- attr.fields(Dataset)._labels,
200
- attr.fields(Dataset)._recipe,
201
- attr.fields(Dataset)._ontology,
202
- attr.fields(Dataset)._directory_tree,
203
- attr.fields(Dataset).access_level,
204
- attr.fields(Dataset).readable_type,
205
- attr.fields(Dataset).created_at,
206
- attr.fields(Dataset).updated_at,
207
- attr.fields(Dataset).updated_by,
208
- attr.fields(Dataset).annotations_count,
209
- attr.fields(Dataset).items_url,
210
- attr.fields(Dataset).expiration_options,
211
- attr.fields(Dataset).items_count,
212
- attr.fields(Dataset).index_driver,
213
- attr.fields(Dataset).enable_sync_with_cloned,
214
- attr.fields(Dataset).src_dataset,
215
- ))
216
- _json.update({'items': self.items_url})
217
- _json['readableType'] = self.readable_type
218
- _json['createdAt'] = self.created_at
219
- _json['updatedAt'] = self.updated_at
220
- _json['updatedBy'] = self.updated_by
221
- _json['annotationsCount'] = self.annotations_count
222
- _json['accessLevel'] = self.access_level
223
- _json['readonly'] = self._readonly
224
- _json['itemsCount'] = self.items_count
225
- _json['indexDriver'] = self.index_driver
226
- if self.expiration_options and self.expiration_options.to_json():
227
- _json['expirationOptions'] = self.expiration_options.to_json()
228
- if self.enable_sync_with_cloned is not None:
229
- _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
230
- if self.src_dataset is not None:
231
- _json['srcDataset'] = self.src_dataset
232
- return _json
233
-
234
- @property
235
- def labels(self):
236
- if self._labels is None:
237
- self._labels = self._get_ontology().labels
238
- return self._labels
239
-
240
- @property
241
- def readonly(self):
242
- return self._readonly
243
-
244
- @property
245
- def platform_url(self):
246
- return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
247
-
248
- @readonly.setter
249
- def readonly(self, state):
250
- import warnings
251
- warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
252
-
253
- @property
254
- def labels_flat_dict(self):
255
- return self._get_ontology().labels_flat_dict
256
-
257
- @property
258
- def instance_map(self) -> dict:
259
- return self._get_ontology().instance_map
260
-
261
- @instance_map.setter
262
- def instance_map(self, value: dict):
263
- """
264
- instance mapping for creating instance mask
265
-
266
- :param value: dictionary {label: map_id}
267
- """
268
- if not isinstance(value, dict):
269
- raise ValueError('input must be a dictionary of {label_name: instance_id}')
270
- self._get_ontology().instance_map = value
271
-
272
- @property
273
- def ontology_ids(self):
274
- if self._ontology_ids is None:
275
- self._ontology_ids = list()
276
- if self.metadata is not None and 'system' in self.metadata and 'recipes' in self.metadata['system']:
277
- recipe_ids = self.get_recipe_ids()
278
- for rec_id in recipe_ids:
279
- recipe = self.recipes.get(recipe_id=rec_id)
280
- self._ontology_ids += recipe.ontology_ids
281
- return self._ontology_ids
282
-
283
- @_repositories.default
284
- def set_repositories(self):
285
- reps = namedtuple('repositories',
286
- field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
287
- 'ontologies', 'features', 'settings', 'schema', 'collections'])
288
- if self._project is None:
289
- datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
290
- else:
291
- datasets = self._project.datasets
292
-
293
- return reps(
294
- items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
295
- recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
296
- assignments=repositories.Assignments(project=self._project, client_api=self._client_api, dataset=self),
297
- tasks=repositories.Tasks(client_api=self._client_api, project=self._project, dataset=self),
298
- annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
299
- datasets=datasets,
300
- ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
301
- features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
302
- settings=repositories.Settings(client_api=self._client_api, dataset=self),
303
- schema=repositories.Schema(client_api=self._client_api, dataset=self),
304
- collections=repositories.Collections(client_api=self._client_api, dataset=self)
305
- )
306
-
307
- @property
308
- def settings(self):
309
- assert isinstance(self._repositories.settings, repositories.Settings)
310
- return self._repositories.settings
311
-
312
- @property
313
- def items(self):
314
- assert isinstance(self._repositories.items, repositories.Items)
315
- return self._repositories.items
316
-
317
- @property
318
- def ontologies(self):
319
- assert isinstance(self._repositories.ontologies, repositories.Ontologies)
320
- return self._repositories.ontologies
321
-
322
- @property
323
- def recipes(self):
324
- assert isinstance(self._repositories.recipes, repositories.Recipes)
325
- return self._repositories.recipes
326
-
327
- @property
328
- def datasets(self):
329
- assert isinstance(self._repositories.datasets, repositories.Datasets)
330
- return self._repositories.datasets
331
-
332
- @property
333
- def assignments(self):
334
- assert isinstance(self._repositories.assignments, repositories.Assignments)
335
- return self._repositories.assignments
336
-
337
- @property
338
- def tasks(self):
339
- assert isinstance(self._repositories.tasks, repositories.Tasks)
340
- return self._repositories.tasks
341
-
342
- @property
343
- def annotations(self):
344
- assert isinstance(self._repositories.annotations, repositories.Annotations)
345
- return self._repositories.annotations
346
-
347
- @property
348
- def features(self):
349
- assert isinstance(self._repositories.features, repositories.Features)
350
- return self._repositories.features
351
-
352
- @property
353
- def collections(self):
354
- assert isinstance(self._repositories.collections, repositories.Collections)
355
- return self._repositories.collections
356
-
357
- @property
358
- def schema(self):
359
- assert isinstance(self._repositories.schema, repositories.Schema)
360
- return self._repositories.schema
361
-
362
- @property
363
- def project(self):
364
- if self._project is None:
365
- # get from cache
366
- project = self._client_api.state_io.get('project')
367
- if project is not None:
368
- # build entity from json
369
- p = entities.Project.from_json(_json=project, client_api=self._client_api)
370
- # check if dataset belongs to project
371
- if p.id in self.projects:
372
- self._project = p
373
- if self._project is None:
374
- self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.projects[0],
375
- fetch=None)
376
- assert isinstance(self._project, entities.Project)
377
- return self._project
378
-
379
- @project.setter
380
- def project(self, project):
381
- if not isinstance(project, entities.Project):
382
- raise ValueError('Must input a valid Project entity')
383
- self._project = project
384
-
385
- @property
386
- def directory_tree(self):
387
- if self._directory_tree is None:
388
- self._directory_tree = self.project.datasets.directory_tree(dataset_id=self.id)
389
- assert isinstance(self._directory_tree, entities.DirectoryTree)
390
- return self._directory_tree
391
-
392
- def __copy__(self):
393
- return Dataset.from_json(_json=self.to_json(),
394
- project=self._project,
395
- client_api=self._client_api,
396
- is_fetched=self.is_fetched,
397
- datasets=self.datasets)
398
-
399
- def __get_local_path__(self):
400
- if self._project is not None:
401
- local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
402
- 'projects',
403
- self.project.name,
404
- 'datasets',
405
- self.name)
406
- else:
407
- local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
408
- 'datasets',
409
- '%s_%s' % (self.name, self.id))
410
- return local_path
411
-
412
- def _get_recipe(self):
413
- recipes = self.recipes.list()
414
- if len(recipes) > 0:
415
- return recipes[0]
416
- else:
417
- raise exceptions.PlatformException('404', 'Dataset {} has no recipe'.format(self.name))
418
-
419
- def _get_ontology(self):
420
- if self._ontology is None:
421
- ontologies = self._get_recipe().ontologies.list()
422
- if len(ontologies) > 0:
423
- self._ontology = ontologies[0]
424
- else:
425
- raise exceptions.PlatformException('404', 'Dataset {} has no ontology'.format(self.name))
426
- return self._ontology
427
-
428
- @staticmethod
429
- def serialize_labels(labels_dict):
430
- """
431
- Convert hex color format to rgb
432
-
433
- :param dict labels_dict: dict of labels
434
- :return: dict of converted labels
435
- """
436
- dataset_labels_dict = dict()
437
- for label, color in labels_dict.items():
438
- dataset_labels_dict[label] = '#%02x%02x%02x' % color
439
- return dataset_labels_dict
440
-
441
- def get_recipe_ids(self):
442
- """
443
- Get dataset recipe Ids
444
-
445
- :return: list of recipe ids
446
- :rtype: list
447
- """
448
- return self.metadata['system']['recipes']
449
-
450
- def switch_recipe(self, recipe_id=None, recipe=None):
451
- """
452
- Switch the recipe that linked to the dataset with the given one
453
-
454
- :param str recipe_id: recipe id
455
- :param dtlpy.entities.recipe.Recipe recipe: recipe entity
456
-
457
- **Example**:
458
-
459
- .. code-block:: python
460
-
461
- dataset.switch_recipe(recipe_id='recipe_id')
462
- """
463
- if recipe is None and recipe_id is None:
464
- raise exceptions.PlatformException('400', 'Must provide recipe or recipe_id')
465
- if recipe_id is None:
466
- if not isinstance(recipe, entities.Recipe):
467
- raise exceptions.PlatformException('400', 'Recipe must me entities.Recipe type')
468
- else:
469
- recipe_id = recipe.id
470
-
471
- # add recipe id to dataset metadata
472
- if 'system' not in self.metadata:
473
- self.metadata['system'] = dict()
474
- if 'recipes' not in self.metadata['system']:
475
- self.metadata['system']['recipes'] = list()
476
- self.metadata['system']['recipes'] = [recipe_id]
477
- self.update(system_metadata=True)
478
-
479
- def delete(self, sure=False, really=False):
480
- """
481
- Delete a dataset forever!
482
-
483
- **Prerequisites**: You must be an *owner* or *developer* to use this method.
484
-
485
- :param bool sure: are you sure you want to delete?
486
- :param bool really: really really?
487
- :return: True is success
488
- :rtype: bool
489
-
490
- **Example**:
491
-
492
- .. code-block:: python
493
-
494
- is_deleted = dataset.delete(sure=True, really=True)
495
- """
496
- return self.datasets.delete(dataset_id=self.id,
497
- sure=sure,
498
- really=really)
499
-
500
- def update(self, system_metadata=False):
501
- """
502
- Update dataset field
503
-
504
- **Prerequisites**: You must be an *owner* or *developer* to use this method.
505
-
506
- :param bool system_metadata: bool - True, if you want to change metadata system
507
- :return: Dataset object
508
- :rtype: dtlpy.entities.dataset.Dataset
509
-
510
- **Example**:
511
-
512
- .. code-block:: python
513
-
514
- dataset = dataset.update()
515
- """
516
- return self.datasets.update(dataset=self,
517
- system_metadata=system_metadata)
518
-
519
- def unlock(self):
520
- """
521
- Unlock dataset
522
-
523
- **Prerequisites**: You must be an *owner* or *developer* to use this method.
524
-
525
- :return: Dataset object
526
- :rtype: dtlpy.entities.dataset.Dataset
527
-
528
- **Example**:
529
-
530
- .. code-block:: python
531
-
532
- dataset = dataset.unlock()
533
- """
534
- return self.datasets.unlock(dataset=self)
535
-
536
- def set_readonly(self, state: bool):
537
- """
538
- Set dataset readonly mode
539
-
540
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
541
-
542
- :param bool state: state
543
-
544
- **Example**:
545
-
546
- .. code-block:: python
547
-
548
- dataset.set_readonly(state=True)
549
- """
550
- import warnings
551
- warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
552
-
553
- def clone(self,
554
- clone_name=None,
555
- filters=None,
556
- with_items_annotations=True,
557
- with_metadata=True,
558
- with_task_annotations_status=True,
559
- dst_dataset_id=None,
560
- target_directory=None,
561
- ):
562
- """
563
- Clone dataset
564
-
565
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
566
-
567
- :param str clone_name: new dataset name
568
- :param dtlpy.entities.filters.Filters filters: Filters entity or a query dict
569
- :param bool with_items_annotations: clone all item's annotations
570
- :param bool with_metadata: clone metadata
571
- :param bool with_task_annotations_status: clone task annotations status
572
- :param str dst_dataset_id: destination dataset id
573
- :param str target_directory: target directory
574
- :return: dataset object
575
- :rtype: dtlpy.entities.dataset.Dataset
576
-
577
- **Example**:
578
-
579
- .. code-block:: python
580
-
581
- dataset = dataset.clone(dataset_id='dataset_id',
582
- clone_name='dataset_clone_name',
583
- with_metadata=True,
584
- with_items_annotations=False,
585
- with_task_annotations_status=False)
586
- """
587
- return self.datasets.clone(dataset_id=self.id,
588
- filters=filters,
589
- clone_name=clone_name,
590
- with_metadata=with_metadata,
591
- with_items_annotations=with_items_annotations,
592
- with_task_annotations_status=with_task_annotations_status,
593
- dst_dataset_id=dst_dataset_id,
594
- target_directory=target_directory)
595
-
596
- def sync(self, wait=True):
597
- """
598
- Sync dataset with external storage
599
-
600
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
601
-
602
- :param bool wait: wait for the command to finish
603
- :return: True if success
604
- :rtype: bool
605
-
606
- **Example**:
607
-
608
- .. code-block:: python
609
-
610
- success = dataset.sync()
611
- """
612
- return self.datasets.sync(dataset_id=self.id, wait=wait)
613
-
614
- def download_annotations(self,
615
- local_path=None,
616
- filters=None,
617
- annotation_options: ViewAnnotationOptions = None,
618
- annotation_filters=None,
619
- overwrite=False,
620
- thickness=1,
621
- with_text=False,
622
- remote_path=None,
623
- include_annotations_in_output=True,
624
- export_png_files=False,
625
- filter_output_annotations=False,
626
- alpha=1,
627
- export_version=ExportVersion.V1,
628
- dataset_lock=False,
629
- lock_timeout_sec=None,
630
- export_summary=False,
631
- ):
632
- """
633
- Download dataset by filters.
634
- Filtering the dataset for items and save them local
635
- Optional - also download annotation, mask, instance and image mask of the item
636
-
637
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
638
-
639
- :param str local_path: local folder or filename to save to.
640
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
641
- :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
642
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
643
- :param bool overwrite: optional - default = False
644
- :param bool dataset_lock: optional - default = False
645
- :param bool export_summary: optional - default = False
646
- :param int lock_timeout_sec: optional
647
- :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
648
- :param bool with_text: optional - add text to annotations, default = False
649
- :param str remote_path: DEPRECATED and ignored
650
- :param bool include_annotations_in_output: default - False , if export should contain annotations
651
- :param bool export_png_files: default - if True, semantic annotations should be exported as png files
652
- :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
653
- :param float alpha: opacity value [0 1], default 1
654
- :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
655
- :return: local_path of the directory where all the downloaded item
656
- :rtype: str
657
-
658
- **Example**:
659
-
660
- .. code-block:: python
661
-
662
- local_path = dataset.download_annotations(dataset='dataset_entity',
663
- local_path='local_path',
664
- annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
665
- overwrite=False,
666
- thickness=1,
667
- with_text=False,
668
- alpha=1,
669
- dataset_lock=False,
670
- lock_timeout_sec=300,
671
- export_summary=False
672
- )
673
- """
674
-
675
- return self.datasets.download_annotations(
676
- dataset=self,
677
- local_path=local_path,
678
- overwrite=overwrite,
679
- filters=filters,
680
- annotation_options=annotation_options,
681
- annotation_filters=annotation_filters,
682
- thickness=thickness,
683
- with_text=with_text,
684
- remote_path=remote_path,
685
- include_annotations_in_output=include_annotations_in_output,
686
- export_png_files=export_png_files,
687
- filter_output_annotations=filter_output_annotations,
688
- alpha=alpha,
689
- export_version=export_version,
690
- dataset_lock=dataset_lock,
691
- lock_timeout_sec=lock_timeout_sec,
692
- export_summary=export_summary
693
- )
694
-
695
- def export(self,
696
- local_path=None,
697
- filters=None,
698
- annotation_filters=None,
699
- feature_vector_filters=None,
700
- include_feature_vectors: bool = False,
701
- include_annotations: bool = False,
702
- export_type: ExportType = ExportType.JSON,
703
- timeout: int = 0,
704
- dataset_lock: bool = False,
705
- lock_timeout_sec: int = None,
706
- export_summary: bool = False):
707
- """
708
- Export dataset items and annotations.
709
-
710
- **Prerequisites**: You must be an *owner* or *developer* to use this method.
711
-
712
- You must provide at least ONE of the following params: dataset, dataset_name, dataset_id.
713
-
714
- :param str local_path: The local path to save the exported dataset
715
- :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
716
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
717
- :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
718
- :param bool include_feature_vectors: Include item feature vectors in the export
719
- :param bool include_annotations: Include item annotations in the export
720
- :param bool dataset_lock: Make dataset readonly during the export
721
- :param bool export_summary: Download dataset export summary
722
- :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
723
- :param entities.ExportType export_type: Type of export ('json' or 'zip')
724
- :param int timeout: Maximum time in seconds to wait for the export to complete
725
- :return: Exported item
726
- :rtype: dtlpy.entities.item.Item
727
-
728
- **Example**:
729
-
730
- .. code-block:: python
731
-
732
- export_item = dataset.export(filters=filters,
733
- include_feature_vectors=True,
734
- include_annotations=True,
735
- export_type=dl.ExportType.JSON)
736
- """
737
-
738
- return self.datasets.export(dataset=self,
739
- local_path=local_path,
740
- filters=filters,
741
- annotation_filters=annotation_filters,
742
- feature_vector_filters=feature_vector_filters,
743
- include_feature_vectors=include_feature_vectors,
744
- include_annotations=include_annotations,
745
- export_type=export_type,
746
- timeout=timeout,
747
- dataset_lock=dataset_lock,
748
- lock_timeout_sec=lock_timeout_sec,
749
- export_summary=export_summary)
750
-
751
- def upload_annotations(self,
752
- local_path,
753
- filters=None,
754
- clean=False,
755
- remote_root_path='/',
756
- export_version=ExportVersion.V1
757
- ):
758
- """
759
- Upload annotations to dataset.
760
-
761
- **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
762
-
763
- :param str local_path: str - local folder where the annotations files is.
764
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
765
- :param bool clean: bool - if True it remove the old annotations
766
- :param str remote_root_path: str - the remote root path to match remote and local items
767
- :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
768
-
769
- For example, if the item filepath is a/b/item and remote_root_path is /a the start folder will be b instead of a
770
-
771
- **Example**:
772
-
773
- .. code-block:: python
774
-
775
- dataset.upload_annotations(dataset='dataset_entity',
776
- local_path='local_path',
777
- clean=False,
778
- export_version=dl.ExportVersion.V1
779
- )
780
- """
781
-
782
- return self.datasets.upload_annotations(
783
- dataset=self,
784
- local_path=local_path,
785
- filters=filters,
786
- clean=clean,
787
- remote_root_path=remote_root_path,
788
- export_version=export_version
789
- )
790
-
791
- def checkout(self):
792
- """
793
- Checkout the dataset
794
-
795
- """
796
- self.datasets.checkout(dataset=self)
797
-
798
- def open_in_web(self):
799
- """
800
- Open the dataset in web platform
801
-
802
- """
803
- self._client_api._open_in_web(url=self.platform_url)
804
-
805
- def add_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
806
- recipe_id=None, ontology_id=None, icon_path=None):
807
- """
808
- Add single label to dataset
809
-
810
- **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
811
-
812
- :param str label_name: str - label name
813
- :param tuple color: RGB color of the annotation, e.g (255,0,0) or '#ff0000' for red
814
- :param children: children (sub labels). list of sub labels of this current label, each value is either dict or dl.Label
815
- :param list attributes: add attributes to the labels
816
- :param str display_label: name that display label
817
- :param dtlpy.entities.label.Label label: label object
818
- :param str recipe_id: optional recipe id
819
- :param str ontology_id: optional ontology id
820
- :param str icon_path: path to image to be display on label
821
- :return: label entity
822
- :rtype: dtlpy.entities.label.Label
823
-
824
- **Example**:
825
-
826
- .. code-block:: python
827
-
828
- dataset.add_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
829
- """
830
- # get recipe
831
- if recipe_id is None:
832
- recipe_id = self.get_recipe_ids()[0]
833
- recipe = self.recipes.get(recipe_id=recipe_id)
834
-
835
- # get ontology
836
- if ontology_id is None:
837
- ontology_id = recipe.ontology_ids[0]
838
- ontology = recipe.ontologies.get(ontology_id=ontology_id)
839
- # ontology._dataset = self
840
-
841
- # add label
842
- added_label = ontology.add_label(label_name=label_name,
843
- color=color,
844
- children=children,
845
- attributes=attributes,
846
- display_label=display_label,
847
- label=label,
848
- update_ontology=True,
849
- icon_path=icon_path)
850
-
851
- return added_label
852
-
853
- def add_labels(self, label_list, ontology_id=None, recipe_id=None):
854
- """
855
- Add labels to dataset
856
-
857
- **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
858
-
859
- :param list label_list: a list of labels to add to the dataset's ontology. each value should be a dict, dl.Label or a string
860
- :param str ontology_id: optional ontology id
861
- :param str recipe_id: optional recipe id
862
- :return: label entities
863
-
864
- **Example**:
865
-
866
- .. code-block:: python
867
-
868
- dataset.add_labels(label_list=label_list)
869
- """
870
- # get recipe
871
- if recipe_id is None:
872
- recipe_id = self.get_recipe_ids()[0]
873
- recipe = self.recipes.get(recipe_id=recipe_id)
874
-
875
- # get ontology
876
- if ontology_id is None:
877
- ontology_id = recipe.ontology_ids[0]
878
- ontology = recipe.ontologies.get(ontology_id=ontology_id)
879
-
880
- # add labels to ontology
881
- added_labels = ontology.add_labels(label_list=label_list, update_ontology=True)
882
-
883
- return added_labels
884
-
885
- def update_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
886
- recipe_id=None, ontology_id=None, upsert=False, icon_path=None):
887
- """
888
- Add single label to dataset
889
-
890
- **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
891
-
892
- :param str label_name: str - label name
893
- :param tuple color: color
894
- :param children: children (sub labels)
895
- :param list attributes: add attributes to the labels
896
- :param str display_label: name that display label
897
- :param dtlpy.entities.label.Label label: label
898
- :param str recipe_id: optional recipe id
899
- :param str ontology_id: optional ontology id
900
- :param str icon_path: path to image to be display on label
901
-
902
- :return: label entity
903
- :rtype: dtlpy.entities.label.Label
904
-
905
- **Example**:
906
-
907
- .. code-block:: python
908
-
909
- dataset.update_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
910
- """
911
- # get recipe
912
-
913
- if recipe_id is None:
914
- recipe_id = self.get_recipe_ids()[0]
915
- recipe = self.recipes.get(recipe_id=recipe_id)
916
-
917
- # get ontology
918
- if ontology_id is None:
919
- ontology_id = recipe.ontology_ids[0]
920
- ontology = recipe.ontologies.get(ontology_id=ontology_id)
921
-
922
- # add label
923
- added_label = ontology.update_label(label_name=label_name,
924
- color=color,
925
- children=children,
926
- attributes=attributes,
927
- display_label=display_label,
928
- label=label,
929
- update_ontology=True,
930
- upsert=upsert,
931
- icon_path=icon_path)
932
-
933
- return added_label
934
-
935
- def update_labels(self, label_list, ontology_id=None, recipe_id=None, upsert=False):
936
- """
937
- Add labels to dataset
938
-
939
- **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
940
-
941
- :param list label_list: label list
942
- :param str ontology_id: optional ontology id
943
- :param str recipe_id: optional recipe id
944
- :param bool upsert: if True will add in case it does not existing
945
-
946
- :return: label entities
947
- :rtype: dtlpy.entities.label.Label
948
-
949
- **Example**:
950
-
951
- .. code-block:: python
952
-
953
- dataset.update_labels(label_list=label_list)
954
- """
955
- # get recipe
956
- if recipe_id is None:
957
- recipe_id = self.get_recipe_ids()[0]
958
- recipe = self.recipes.get(recipe_id=recipe_id)
959
-
960
- # get ontology
961
- if ontology_id is None:
962
- ontology_id = recipe.ontology_ids[0]
963
- ontology = recipe.ontologies.get(ontology_id=ontology_id)
964
-
965
- # add labels to ontology
966
- added_labels = ontology.update_labels(label_list=label_list, update_ontology=True, upsert=upsert)
967
-
968
- return added_labels
969
-
970
- def download(
971
- self,
972
- filters=None,
973
- local_path=None,
974
- file_types=None,
975
- annotation_options: ViewAnnotationOptions = None,
976
- annotation_filters=None,
977
- overwrite=False,
978
- to_items_folder=True,
979
- thickness=1,
980
- with_text=False,
981
- without_relative_path=None,
982
- alpha=1,
983
- export_version=ExportVersion.V1,
984
- dataset_lock=False,
985
- lock_timeout_sec=None,
986
- export_summary=False,
987
- ):
988
- """
989
- Download dataset by filters.
990
- Filtering the dataset for items and save them local
991
- Optional - also download annotation, mask, instance and image mask of the item
992
-
993
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
994
-
995
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
996
- :param str local_path: local folder or filename to save to.
997
- :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
998
- :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
999
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1000
- :param bool overwrite: optional - default = False to overwrite the existing files
1001
- :param bool dataset_lock: optional - default = False to make dataset readonly during the download
1002
- :param bool export_summary: optional - default = False to get the symmary of the export
1003
- :param int lock_timeout_sec: optional - Set lock timeout for the export
1004
- :param bool to_items_folder: Create 'items' folder and download items to it
1005
- :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1006
- :param bool with_text: optional - add text to annotations, default = False
1007
- :param bool without_relative_path: bool - download items without the relative path from platform
1008
- :param float alpha: opacity value [0 1], default 1
1009
- :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1010
- :return: `List` of local_path per each downloaded item
1011
-
1012
- **Example**:
1013
-
1014
- .. code-block:: python
1015
-
1016
- dataset.download(local_path='local_path',
1017
- annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1018
- overwrite=False,
1019
- thickness=1,
1020
- with_text=False,
1021
- alpha=1,
1022
- dataset_lock=False,
1023
- lock_timeout_sec=300,
1024
- export_summary=False
1025
- )
1026
- """
1027
- return self.items.download(filters=filters,
1028
- local_path=local_path,
1029
- file_types=file_types,
1030
- annotation_options=annotation_options,
1031
- annotation_filters=annotation_filters,
1032
- overwrite=overwrite,
1033
- to_items_folder=to_items_folder,
1034
- thickness=thickness,
1035
- with_text=with_text,
1036
- without_relative_path=without_relative_path,
1037
- alpha=alpha,
1038
- export_version=export_version,
1039
- dataset_lock=dataset_lock,
1040
- lock_timeout_sec=lock_timeout_sec,
1041
- export_summary=export_summary
1042
- )
1043
-
1044
- def download_folder(
1045
- self,
1046
- folder_path,
1047
- filters=None,
1048
- local_path=None,
1049
- file_types=None,
1050
- annotation_options: ViewAnnotationOptions = None,
1051
- annotation_filters=None,
1052
- overwrite=False,
1053
- to_items_folder=True,
1054
- thickness=1,
1055
- with_text=False,
1056
- without_relative_path=None,
1057
- alpha=1,
1058
- export_version=ExportVersion.V1,
1059
- dataset_lock=False,
1060
- lock_timeout_sec=None,
1061
- export_summary=False,
1062
- ):
1063
- """
1064
- Download dataset folder.
1065
- Optional - also download annotation, mask, instance and image mask of the item
1066
-
1067
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
1068
-
1069
- :param str folder_path: the path of the folder that want to download
1070
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
1071
- :param str local_path: local folder or filename to save to.
1072
- :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
1073
- :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
1074
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1075
- :param bool overwrite: optional - default = False to overwrite the existing files
1076
- :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
1077
- :param bool export_summary: optional - default = False to get the symmary of the export
1078
- :param bool lock_timeout_sec: optional - Set lock timeout for the export
1079
- :param bool to_items_folder: Create 'items' folder and download items to it
1080
- :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1081
- :param bool with_text: optional - add text to annotations, default = False
1082
- :param bool without_relative_path: bool - download items without the relative path from platform
1083
- :param float alpha: opacity value [0 1], default 1
1084
- :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1085
- :return: `List` of local_path per each downloaded item
1086
-
1087
- **Example**:
1088
-
1089
- .. code-block:: python
1090
-
1091
- dataset.download_folder(folder_path='folder_path'
1092
- local_path='local_path',
1093
- annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1094
- overwrite=False,
1095
- thickness=1,
1096
- with_text=False,
1097
- alpha=1,
1098
- save_locally=True,
1099
- dataset_lock=False
1100
- lock_timeout_sec=300,
1101
- export_summary=False
1102
- )
1103
- """
1104
- filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
1105
- return self.items.download(filters=filters,
1106
- local_path=local_path,
1107
- file_types=file_types,
1108
- annotation_options=annotation_options,
1109
- annotation_filters=annotation_filters,
1110
- overwrite=overwrite,
1111
- to_items_folder=to_items_folder,
1112
- thickness=thickness,
1113
- with_text=with_text,
1114
1
+ from collections import namedtuple
2
+ import traceback
3
+ import logging
4
+ from enum import Enum
5
+
6
+ import attr
7
+ import os
8
+
9
+ from .. import repositories, entities, services, exceptions
10
+ from ..services.api_client import ApiClient
11
+ from .annotation import ViewAnnotationOptions, AnnotationType, ExportVersion
12
+
13
+ logger = logging.getLogger(name='dtlpy')
14
+
15
+
16
+ class IndexDriver(str, Enum):
17
+ V1 = "v1"
18
+ V2 = "v2"
19
+
20
+
21
+ class ExportType(str, Enum):
22
+ JSON = "json"
23
+ ZIP = "zip"
24
+
25
+
26
+ class ExpirationOptions:
27
+ """
28
+ ExpirationOptions object
29
+ """
30
+
31
+ def __init__(self, item_max_days: int = None):
32
+ """
33
+ :param item_max_days: int. Items in the dataset will be auto-deleted after this number of days
34
+ """
35
+ self.item_max_days = item_max_days
36
+
37
+ def to_json(self):
38
+ _json = dict()
39
+ if self.item_max_days is not None:
40
+ _json["itemMaxDays"] = self.item_max_days
41
+ return _json
42
+
43
+ @classmethod
44
+ def from_json(cls, _json: dict):
45
+ item_max_days = _json.get('itemMaxDays', None)
46
+ if item_max_days:
47
+ return cls(item_max_days=item_max_days)
48
+ return None
49
+
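A minimal usage sketch for `ExpirationOptions` (the 30-day value is illustrative, not a default):

.. code-block:: python

    opts = ExpirationOptions(item_max_days=30)
    assert opts.to_json() == {'itemMaxDays': 30}
    # from_json returns None when 'itemMaxDays' is absent or falsy
    restored = ExpirationOptions.from_json({'itemMaxDays': 30})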
50
+
51
+ @attr.s
52
+ class Dataset(entities.BaseEntity):
53
+ """
54
+ Dataset object
55
+ """
56
+ # dataset information
57
+ id = attr.ib()
58
+ url = attr.ib()
59
+ name = attr.ib()
60
+ annotated = attr.ib(repr=False)
61
+ creator = attr.ib()
62
+ projects = attr.ib(repr=False)
63
+ items_count = attr.ib()
64
+ metadata = attr.ib(repr=False)
65
+ directoryTree = attr.ib(repr=False)
66
+ expiration_options = attr.ib()
67
+ index_driver = attr.ib()
68
+ enable_sync_with_cloned = attr.ib(repr=False)
69
+
70
+ # name change when to_json
71
+ created_at = attr.ib()
72
+ updated_at = attr.ib()
73
+ updated_by = attr.ib()
74
+ items_url = attr.ib(repr=False)
75
+ readable_type = attr.ib(repr=False)
76
+ access_level = attr.ib(repr=False)
77
+ driver = attr.ib(repr=False)
78
+ src_dataset = attr.ib(repr=False)
79
+ _readonly = attr.ib(repr=False)
80
+ annotations_count = attr.ib()
81
+
82
+ # api
83
+ _client_api = attr.ib(type=ApiClient, repr=False)
84
+
85
+ # entities
86
+ _project = attr.ib(default=None, repr=False)
87
+
88
+ # repositories
89
+ _datasets = attr.ib(repr=False, default=None)
90
+ _repositories = attr.ib(repr=False)
91
+
92
+ # defaults
93
+ _ontology_ids = attr.ib(default=None, repr=False)
94
+ _labels = attr.ib(default=None, repr=False)
95
+ _directory_tree = attr.ib(default=None, repr=False)
96
+ _recipe = attr.ib(default=None, repr=False)
97
+ _ontology = attr.ib(default=None, repr=False)
98
+
99
+ @property
100
+ def itemsCount(self):
101
+ return self.items_count
102
+
103
+ @staticmethod
104
+ def _protected_from_json(project: entities.Project,
105
+ _json: dict,
106
+ client_api: ApiClient,
107
+ datasets=None,
108
+ is_fetched=True):
109
+ """
110
+ Same as from_json, but wrapped in a try-except to catch any error
111
+
112
+ :param project: dataset's project
113
+ :param _json: _json response from host
114
+ :param client_api: ApiClient entity
115
+ :param datasets: Datasets repository
116
+ :param is_fetched: is Entity fetched from Platform
117
+ :return: Dataset object
118
+ """
119
+ try:
120
+ dataset = Dataset.from_json(project=project,
121
+ _json=_json,
122
+ client_api=client_api,
123
+ datasets=datasets,
124
+ is_fetched=is_fetched)
125
+ status = True
126
+ except Exception:
127
+ dataset = traceback.format_exc()
128
+ status = False
129
+ return status, dataset
130
+
131
+ @classmethod
132
+ def from_json(cls,
133
+ project: entities.Project,
134
+ _json: dict,
135
+ client_api: ApiClient,
136
+ datasets=None,
137
+ is_fetched=True):
138
+ """
139
+ Build a Dataset entity object from a json
140
+
141
+ :param project: dataset's project
142
+ :param dict _json: _json response from host
143
+ :param client_api: ApiClient entity
144
+ :param datasets: Datasets repository
145
+ :param bool is_fetched: is Entity fetched from Platform
146
+ :return: Dataset object
147
+ :rtype: dtlpy.entities.dataset.Dataset
148
+ """
149
+ projects = _json.get('projects', None)
150
+ if project is not None and projects is not None:
151
+ if project.id not in projects:
152
+ logger.warning('Dataset has been fetched from a project that is not in its projects list')
153
+ project = None
154
+
155
+ expiration_options = _json.get('expirationOptions', None)
156
+ if expiration_options:
157
+ expiration_options = ExpirationOptions.from_json(expiration_options)
158
+ inst = cls(metadata=_json.get('metadata', None),
159
+ directoryTree=_json.get('directoryTree', None),
160
+ readable_type=_json.get('readableType', None),
161
+ access_level=_json.get('accessLevel', None),
162
+ created_at=_json.get('createdAt', None),
163
+ updated_at=_json.get('updatedAt', None),
164
+ updated_by=_json.get('updatedBy', None),
165
+ annotations_count=_json.get("annotationsCount", None),
166
+ items_count=_json.get('itemsCount', None),
167
+ annotated=_json.get('annotated', None),
168
+ readonly=_json.get('readonly', None),
169
+ projects=projects,
170
+ creator=_json.get('creator', None),
171
+ items_url=_json.get('items', None),
172
+ driver=_json.get('driver', None),
173
+ name=_json.get('name', None),
174
+ url=_json.get('url', None),
175
+ id=_json.get('id', None),
176
+ datasets=datasets,
177
+ client_api=client_api,
178
+ project=project,
179
+ expiration_options=expiration_options,
180
+ index_driver=_json.get('indexDriver', None),
181
+ enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
182
+ src_dataset=_json.get('srcDataset', None))
183
+ inst.is_fetched = is_fetched
184
+ return inst
185
+
186
+ def to_json(self):
187
+ """
188
+ Returns platform _json format of object
189
+
190
+ :return: platform json format of object
191
+ :rtype: dict
192
+ """
193
+ _json = attr.asdict(self, filter=attr.filters.exclude(attr.fields(Dataset)._client_api,
194
+ attr.fields(Dataset)._project,
195
+ attr.fields(Dataset)._readonly,
196
+ attr.fields(Dataset)._datasets,
197
+ attr.fields(Dataset)._repositories,
198
+ attr.fields(Dataset)._ontology_ids,
199
+ attr.fields(Dataset)._labels,
200
+ attr.fields(Dataset)._recipe,
201
+ attr.fields(Dataset)._ontology,
202
+ attr.fields(Dataset)._directory_tree,
203
+ attr.fields(Dataset).access_level,
204
+ attr.fields(Dataset).readable_type,
205
+ attr.fields(Dataset).created_at,
206
+ attr.fields(Dataset).updated_at,
207
+ attr.fields(Dataset).updated_by,
208
+ attr.fields(Dataset).annotations_count,
209
+ attr.fields(Dataset).items_url,
210
+ attr.fields(Dataset).expiration_options,
211
+ attr.fields(Dataset).items_count,
212
+ attr.fields(Dataset).index_driver,
213
+ attr.fields(Dataset).enable_sync_with_cloned,
214
+ attr.fields(Dataset).src_dataset,
215
+ ))
216
+ _json.update({'items': self.items_url})
217
+ _json['readableType'] = self.readable_type
218
+ _json['createdAt'] = self.created_at
219
+ _json['updatedAt'] = self.updated_at
220
+ _json['updatedBy'] = self.updated_by
221
+ _json['annotationsCount'] = self.annotations_count
222
+ _json['accessLevel'] = self.access_level
223
+ _json['readonly'] = self._readonly
224
+ _json['itemsCount'] = self.items_count
225
+ _json['indexDriver'] = self.index_driver
226
+ if self.expiration_options and self.expiration_options.to_json():
227
+ _json['expirationOptions'] = self.expiration_options.to_json()
228
+ if self.enable_sync_with_cloned is not None:
229
+ _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
230
+ if self.src_dataset is not None:
231
+ _json['srcDataset'] = self.src_dataset
232
+ return _json
233
+
234
+ @property
235
+ def labels(self):
236
+ if self._labels is None:
237
+ self._labels = self._get_ontology().labels
238
+ return self._labels
239
+
240
+ @property
241
+ def readonly(self):
242
+ return self._readonly
243
+
244
+ @property
245
+ def platform_url(self):
246
+ return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
247
+
248
+ @readonly.setter
249
+ def readonly(self, state):
250
+ import warnings
251
+ warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
252
+
253
+ @property
254
+ def labels_flat_dict(self):
255
+ return self._get_ontology().labels_flat_dict
256
+
257
+ @property
258
+ def instance_map(self) -> dict:
259
+ return self._get_ontology().instance_map
260
+
261
+ @instance_map.setter
262
+ def instance_map(self, value: dict):
263
+ """
264
+ instance mapping for creating instance mask
265
+
266
+ :param value: dictionary {label: map_id}
267
+ """
268
+ if not isinstance(value, dict):
269
+ raise ValueError('input must be a dictionary of {label_name: instance_id}')
270
+ self._get_ontology().instance_map = value
271
+
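A short sketch of the setter above (label names and ids are illustrative):

.. code-block:: python

    # map each label to the integer id used when rendering instance masks
    dataset.instance_map = {'background': 0, 'person': 1, 'car': 2}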
272
+ @property
273
+ def ontology_ids(self):
274
+ if self._ontology_ids is None:
275
+ self._ontology_ids = list()
276
+ if self.metadata is not None and 'system' in self.metadata and 'recipes' in self.metadata['system']:
277
+ recipe_ids = self.get_recipe_ids()
278
+ for rec_id in recipe_ids:
279
+ recipe = self.recipes.get(recipe_id=rec_id)
280
+ self._ontology_ids += recipe.ontology_ids
281
+ return self._ontology_ids
282
+
283
+ @_repositories.default
284
+ def set_repositories(self):
285
+ reps = namedtuple('repositories',
286
+ field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
287
+ 'ontologies', 'features', 'settings', 'schema', 'collections'])
288
+ if self._project is None:
289
+ datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
290
+ else:
291
+ datasets = self._project.datasets
292
+
293
+ return reps(
294
+ items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
295
+ recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
296
+ assignments=repositories.Assignments(project=self._project, client_api=self._client_api, dataset=self),
297
+ tasks=repositories.Tasks(client_api=self._client_api, project=self._project, dataset=self),
298
+ annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
299
+ datasets=datasets,
300
+ ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
301
+ features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
302
+ settings=repositories.Settings(client_api=self._client_api, dataset=self),
303
+ schema=repositories.Schema(client_api=self._client_api, dataset=self),
304
+ collections=repositories.Collections(client_api=self._client_api, dataset=self)
305
+ )
306
+
307
+ @property
308
+ def settings(self):
309
+ assert isinstance(self._repositories.settings, repositories.Settings)
310
+ return self._repositories.settings
311
+
312
+ @property
313
+ def items(self):
314
+ assert isinstance(self._repositories.items, repositories.Items)
315
+ return self._repositories.items
316
+
317
+ @property
318
+ def ontologies(self):
319
+ assert isinstance(self._repositories.ontologies, repositories.Ontologies)
320
+ return self._repositories.ontologies
321
+
322
+ @property
323
+ def recipes(self):
324
+ assert isinstance(self._repositories.recipes, repositories.Recipes)
325
+ return self._repositories.recipes
326
+
327
+ @property
328
+ def datasets(self):
329
+ assert isinstance(self._repositories.datasets, repositories.Datasets)
330
+ return self._repositories.datasets
331
+
332
+ @property
333
+ def assignments(self):
334
+ assert isinstance(self._repositories.assignments, repositories.Assignments)
335
+ return self._repositories.assignments
336
+
337
+ @property
338
+ def tasks(self):
339
+ assert isinstance(self._repositories.tasks, repositories.Tasks)
340
+ return self._repositories.tasks
341
+
342
+ @property
343
+ def annotations(self):
344
+ assert isinstance(self._repositories.annotations, repositories.Annotations)
345
+ return self._repositories.annotations
346
+
347
+ @property
348
+ def features(self):
349
+ assert isinstance(self._repositories.features, repositories.Features)
350
+ return self._repositories.features
351
+
352
+ @property
353
+ def collections(self):
354
+ assert isinstance(self._repositories.collections, repositories.Collections)
355
+ return self._repositories.collections
356
+
357
+ @property
358
+ def schema(self):
359
+ assert isinstance(self._repositories.schema, repositories.Schema)
360
+ return self._repositories.schema
361
+
362
+ @property
363
+ def project(self):
364
+ if self._project is None:
365
+ # get from cache
366
+ project = self._client_api.state_io.get('project')
367
+ if project is not None:
368
+ # build entity from json
369
+ p = entities.Project.from_json(_json=project, client_api=self._client_api)
370
+ # check if dataset belongs to project
371
+ if p.id in self.projects:
372
+ self._project = p
373
+ if self._project is None:
374
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.projects[0],
375
+ fetch=None)
376
+ assert isinstance(self._project, entities.Project)
377
+ return self._project
378
+
379
+ @project.setter
380
+ def project(self, project):
381
+ if not isinstance(project, entities.Project):
382
+ raise ValueError('Must input a valid Project entity')
383
+ self._project = project
384
+
385
+ @property
386
+ def directory_tree(self):
387
+ if self._directory_tree is None:
388
+ self._directory_tree = self.project.datasets.directory_tree(dataset_id=self.id)
389
+ assert isinstance(self._directory_tree, entities.DirectoryTree)
390
+ return self._directory_tree
391
+
392
+ def __copy__(self):
393
+ return Dataset.from_json(_json=self.to_json(),
394
+ project=self._project,
395
+ client_api=self._client_api,
396
+ is_fetched=self.is_fetched,
397
+ datasets=self.datasets)
398
+
399
+ def __get_local_path__(self):
400
+ if self._project is not None:
401
+ local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
402
+ 'projects',
403
+ self.project.name,
404
+ 'datasets',
405
+ self.name)
406
+ else:
407
+ local_path = os.path.join(services.service_defaults.DATALOOP_PATH,
408
+ 'datasets',
409
+ '%s_%s' % (self.name, self.id))
410
+ return local_path
411
+
412
+ def _get_recipe(self):
413
+ recipes = self.recipes.list()
414
+ if len(recipes) > 0:
415
+ return recipes[0]
416
+ else:
417
+ raise exceptions.PlatformException('404', 'Dataset {} has no recipe'.format(self.name))
418
+
419
+ def _get_ontology(self):
420
+ if self._ontology is None:
421
+ ontologies = self._get_recipe().ontologies.list()
422
+ if len(ontologies) > 0:
423
+ self._ontology = ontologies[0]
424
+ else:
425
+ raise exceptions.PlatformException('404', 'Dataset {} has no ontology'.format(self.name))
426
+ return self._ontology
427
+
428
+ @staticmethod
429
+ def serialize_labels(labels_dict):
430
+ """
431
+ Convert hex color format to rgb
432
+
433
+ :param dict labels_dict: dict of labels
434
+ :return: dict of converted labels
435
+ """
436
+ dataset_labels_dict = dict()
437
+ for label, color in labels_dict.items():
438
+ dataset_labels_dict[label] = '#%02x%02x%02x' % color
439
+ return dataset_labels_dict
440
+
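`serialize_labels` ships without an example; a minimal sketch of the RGB-tuple-to-hex conversion:

.. code-block:: python

    hex_labels = Dataset.serialize_labels({'person': (255, 0, 0), 'sky': (0, 0, 255)})
    # {'person': '#ff0000', 'sky': '#0000ff'}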
441
+ def get_recipe_ids(self):
442
+ """
443
+ Get dataset recipe Ids
444
+
445
+ :return: list of recipe ids
446
+ :rtype: list
447
+ """
448
+ return self.metadata['system']['recipes']
449
+
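A small sketch tying `get_recipe_ids` to the recipes repository, mirroring the internal pattern used throughout this class:

.. code-block:: python

    recipe_id = dataset.get_recipe_ids()[0]
    recipe = dataset.recipes.get(recipe_id=recipe_id)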
450
+ def switch_recipe(self, recipe_id=None, recipe=None):
451
+ """
452
+ Switch the recipe linked to the dataset with the given one
453
+
454
+ :param str recipe_id: recipe id
455
+ :param dtlpy.entities.recipe.Recipe recipe: recipe entity
456
+
457
+ **Example**:
458
+
459
+ .. code-block:: python
460
+
461
+ dataset.switch_recipe(recipe_id='recipe_id')
462
+ """
463
+ if recipe is None and recipe_id is None:
464
+ raise exceptions.PlatformException('400', 'Must provide recipe or recipe_id')
465
+ if recipe_id is None:
466
+ if not isinstance(recipe, entities.Recipe):
467
+ raise exceptions.PlatformException('400', 'Recipe must be of entities.Recipe type')
468
+ else:
469
+ recipe_id = recipe.id
470
+
471
+ # add recipe id to dataset metadata
472
+ if 'system' not in self.metadata:
473
+ self.metadata['system'] = dict()
474
+ if 'recipes' not in self.metadata['system']:
475
+ self.metadata['system']['recipes'] = list()
476
+ self.metadata['system']['recipes'] = [recipe_id]
477
+ self.update(system_metadata=True)
478
+
479
+ def delete(self, sure=False, really=False):
480
+ """
481
+ Delete a dataset forever!
482
+
483
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
484
+
485
+ :param bool sure: are you sure you want to delete?
486
+ :param bool really: really really?
487
+ :return: True if successful
488
+ :rtype: bool
489
+
490
+ **Example**:
491
+
492
+ .. code-block:: python
493
+
494
+ is_deleted = dataset.delete(sure=True, really=True)
495
+ """
496
+ return self.datasets.delete(dataset_id=self.id,
497
+ sure=sure,
498
+ really=really)
499
+
500
+ def update(self, system_metadata=False):
501
+ """
502
+ Update dataset field
503
+
504
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
505
+
506
+ :param bool system_metadata: bool - True, if you want to change metadata system
507
+ :return: Dataset object
508
+ :rtype: dtlpy.entities.dataset.Dataset
509
+
510
+ **Example**:
511
+
512
+ .. code-block:: python
513
+
514
+ dataset = dataset.update()
515
+ """
516
+ return self.datasets.update(dataset=self,
517
+ system_metadata=system_metadata)
518
+
519
+ def unlock(self):
520
+ """
521
+ Unlock dataset
522
+
523
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
524
+
525
+ :return: Dataset object
526
+ :rtype: dtlpy.entities.dataset.Dataset
527
+
528
+ **Example**:
529
+
530
+ .. code-block:: python
531
+
532
+ dataset = dataset.unlock()
533
+ """
534
+ return self.datasets.unlock(dataset=self)
535
+
536
+ def set_readonly(self, state: bool):
537
+ """
538
+ Set dataset readonly mode
539
+
540
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
541
+
542
+ :param bool state: state
543
+
544
+ **Example**:
545
+
546
+ .. code-block:: python
547
+
548
+ dataset.set_readonly(state=True)
549
+ """
550
+ import warnings
551
+ warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
552
+
553
+ def clone(self,
554
+ clone_name=None,
555
+ filters=None,
556
+ with_items_annotations=True,
557
+ with_metadata=True,
558
+ with_task_annotations_status=True,
559
+ dst_dataset_id=None,
560
+ target_directory=None,
561
+ ):
562
+ """
563
+ Clone dataset
564
+
565
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
566
+
567
+ :param str clone_name: new dataset name
568
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a query dict
569
+ :param bool with_items_annotations: clone all item's annotations
570
+ :param bool with_metadata: clone metadata
571
+ :param bool with_task_annotations_status: clone task annotations status
572
+ :param str dst_dataset_id: destination dataset id
573
+ :param str target_directory: target directory
574
+ :return: dataset object
575
+ :rtype: dtlpy.entities.dataset.Dataset
576
+
577
+ **Example**:
578
+
579
+ .. code-block:: python
580
+
581
+ dataset = dataset.clone(
582
+ clone_name='dataset_clone_name',
583
+ with_metadata=True,
584
+ with_items_annotations=False,
585
+ with_task_annotations_status=False)
586
+ """
587
+ return self.datasets.clone(dataset_id=self.id,
588
+ filters=filters,
589
+ clone_name=clone_name,
590
+ with_metadata=with_metadata,
591
+ with_items_annotations=with_items_annotations,
592
+ with_task_annotations_status=with_task_annotations_status,
593
+ dst_dataset_id=dst_dataset_id,
594
+ target_directory=target_directory)
595
+
596
+ def sync(self, wait=True):
597
+ """
598
+ Sync dataset with external storage
599
+
600
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
601
+
602
+ :param bool wait: wait for the command to finish
603
+ :return: True if success
604
+ :rtype: bool
605
+
606
+ **Example**:
607
+
608
+ .. code-block:: python
609
+
610
+ success = dataset.sync()
611
+ """
612
+ return self.datasets.sync(dataset_id=self.id, wait=wait)
613
+
614
+ def download_annotations(self,
615
+ local_path=None,
616
+ filters=None,
617
+ annotation_options: ViewAnnotationOptions = None,
618
+ annotation_filters=None,
619
+ overwrite=False,
620
+ thickness=1,
621
+ with_text=False,
622
+ remote_path=None,
623
+ include_annotations_in_output=True,
624
+ export_png_files=False,
625
+ filter_output_annotations=False,
626
+ alpha=1,
627
+ export_version=ExportVersion.V1,
628
+ dataset_lock=False,
629
+ lock_timeout_sec=None,
630
+ export_summary=False,
631
+ ):
632
+ """
633
+ Download dataset by filters.
634
+ Filter the dataset for items and save them locally.
635
+ Optional - also download the item's annotations, mask, instance mask, and image mask.
636
+
637
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
638
+
639
+ :param str local_path: local folder or filename to save to.
640
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
641
+ :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
642
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
643
+ :param bool overwrite: optional - default = False
644
+ :param bool dataset_lock: optional - default = False
645
+ :param bool export_summary: optional - default = False
646
+ :param int lock_timeout_sec: optional
647
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
648
+ :param bool with_text: optional - add text to annotations, default = False
649
+ :param str remote_path: DEPRECATED and ignored
650
+ :param bool include_annotations_in_output: default = True; whether the export should contain annotations
651
+ :param bool export_png_files: default = False; if True, semantic annotations are also exported as PNG files
652
+ :param bool filter_output_annotations: default = False; when exporting by filter, whether to filter the output annotations as well
653
+ :param float alpha: opacity value [0 1], default 1
654
+ :param str export_version: `V2` - exported items will have the original extension in the filename, `V1` - no original extension in filenames
655
+ :return: local path of the directory where all the items were downloaded
656
+ :rtype: str
657
+
658
+ **Example**:
659
+
660
+ .. code-block:: python
661
+
662
+ local_path = dataset.download_annotations(
663
+ local_path='local_path',
664
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
665
+ overwrite=False,
666
+ thickness=1,
667
+ with_text=False,
668
+ alpha=1,
669
+ dataset_lock=False,
670
+ lock_timeout_sec=300,
671
+ export_summary=False
672
+ )
673
+ """
674
+
675
+ return self.datasets.download_annotations(
676
+ dataset=self,
677
+ local_path=local_path,
678
+ overwrite=overwrite,
679
+ filters=filters,
680
+ annotation_options=annotation_options,
681
+ annotation_filters=annotation_filters,
682
+ thickness=thickness,
683
+ with_text=with_text,
684
+ remote_path=remote_path,
685
+ include_annotations_in_output=include_annotations_in_output,
686
+ export_png_files=export_png_files,
687
+ filter_output_annotations=filter_output_annotations,
688
+ alpha=alpha,
689
+ export_version=export_version,
690
+ dataset_lock=dataset_lock,
691
+ lock_timeout_sec=lock_timeout_sec,
692
+ export_summary=export_summary
693
+ )
694
+
695
+ def export(self,
696
+ local_path=None,
697
+ filters=None,
698
+ annotation_filters=None,
699
+ feature_vector_filters=None,
700
+ include_feature_vectors: bool = False,
701
+ include_annotations: bool = False,
702
+ export_type: ExportType = ExportType.JSON,
703
+ timeout: int = 0,
704
+ dataset_lock: bool = False,
705
+ lock_timeout_sec: int = None,
706
+ export_summary: bool = False):
707
+ """
708
+ Export dataset items and annotations.
709
+
710
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
711
+
712
+ The export runs on this dataset entity, so no dataset, dataset_name, or dataset_id parameter is required.
713
+
714
+ :param str local_path: The local path to save the exported dataset
715
+ :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
716
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
717
+ :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
718
+ :param bool include_feature_vectors: Include item feature vectors in the export
719
+ :param bool include_annotations: Include item annotations in the export
720
+ :param bool dataset_lock: Make dataset readonly during the export
721
+ :param bool export_summary: Download dataset export summary
722
+ :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
723
+ :param entities.ExportType export_type: Type of export ('json' or 'zip')
724
+ :param int timeout: Maximum time in seconds to wait for the export to complete
725
+ :return: Exported item
726
+ :rtype: dtlpy.entities.item.Item
727
+
728
+ **Example**:
729
+
730
+ .. code-block:: python
731
+
732
+ export_item = dataset.export(filters=filters,
733
+ include_feature_vectors=True,
734
+ include_annotations=True,
735
+ export_type=dl.ExportType.JSON)
736
+ """
737
+
738
+ return self.datasets.export(dataset=self,
739
+ local_path=local_path,
740
+ filters=filters,
741
+ annotation_filters=annotation_filters,
742
+ feature_vector_filters=feature_vector_filters,
743
+ include_feature_vectors=include_feature_vectors,
744
+ include_annotations=include_annotations,
745
+ export_type=export_type,
746
+ timeout=timeout,
747
+ dataset_lock=dataset_lock,
748
+ lock_timeout_sec=lock_timeout_sec,
749
+ export_summary=export_summary)
750
+
751
+ def upload_annotations(self,
752
+ local_path,
753
+ filters=None,
754
+ clean=False,
755
+ remote_root_path='/',
756
+ export_version=ExportVersion.V1
757
+ ):
758
+ """
759
+ Upload annotations to dataset.
760
+
761
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
762
+
763
+ :param str local_path: str - local folder where the annotation files are.
764
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
765
+ :param bool clean: bool - if True, the old annotations are removed
766
+ :param str remote_root_path: str - the remote root path to match remote and local items
767
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
768
+
769
+ For example, if the item filepath is a/b/item and remote_root_path is /a, the start folder will be b instead of a
770
+
771
+ **Example**:
772
+
773
+ .. code-block:: python
774
+
775
+ dataset.upload_annotations(
776
+ local_path='local_path',
777
+ clean=False,
778
+ export_version=dl.ExportVersion.V1
779
+ )
780
+ """
781
+
782
+ return self.datasets.upload_annotations(
783
+ dataset=self,
784
+ local_path=local_path,
785
+ filters=filters,
786
+ clean=clean,
787
+ remote_root_path=remote_root_path,
788
+ export_version=export_version
789
+ )
790
+
791
+ def checkout(self):
792
+ """
793
+ Checkout the dataset
794
+
795
+ """
796
+ self.datasets.checkout(dataset=self)
797
+
798
+ def open_in_web(self):
799
+ """
800
+ Open the dataset in web platform
801
+
802
+ """
803
+ self._client_api._open_in_web(url=self.platform_url)
804
+
805
+ def add_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
806
+ recipe_id=None, ontology_id=None, icon_path=None):
807
+ """
808
+ Add single label to dataset
809
+
810
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
811
+
812
+ :param str label_name: str - label name
813
+ :param tuple color: RGB color of the annotation, e.g (255,0,0) or '#ff0000' for red
814
+ :param children: children (sub labels). list of sub labels of this current label, each value is either dict or dl.Label
815
+ :param list attributes: add attributes to the labels
816
+ :param str display_label: name that display label
817
+ :param dtlpy.entities.label.Label label: label object
818
+ :param str recipe_id: optional recipe id
819
+ :param str ontology_id: optional ontology id
820
+ :param str icon_path: path to an image to be displayed on the label
821
+ :return: label entity
822
+ :rtype: dtlpy.entities.label.Label
823
+
824
+ **Example**:
825
+
826
+ .. code-block:: python
827
+
828
+ dataset.add_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
829
+ """
830
+ # get recipe
831
+ if recipe_id is None:
832
+ recipe_id = self.get_recipe_ids()[0]
833
+ recipe = self.recipes.get(recipe_id=recipe_id)
834
+
835
+ # get ontology
836
+ if ontology_id is None:
837
+ ontology_id = recipe.ontology_ids[0]
838
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
839
+ # ontology._dataset = self
840
+
841
+ # add label
842
+ added_label = ontology.add_label(label_name=label_name,
843
+ color=color,
844
+ children=children,
845
+ attributes=attributes,
846
+ display_label=display_label,
847
+ label=label,
848
+ update_ontology=True,
849
+ icon_path=icon_path)
850
+
851
+ return added_label
852
+
853
+ def add_labels(self, label_list, ontology_id=None, recipe_id=None):
854
+ """
855
+ Add labels to dataset
856
+
857
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
858
+
859
+ :param list label_list: a list of labels to add to the dataset's ontology. each value should be a dict, dl.Label or a string
860
+ :param str ontology_id: optional ontology id
861
+ :param str recipe_id: optional recipe id
862
+ :return: label entities
863
+
864
+ **Example**:
865
+
866
+ .. code-block:: python
867
+
868
+ dataset.add_labels(label_list=label_list)
869
+ """
870
+ # get recipe
871
+ if recipe_id is None:
872
+ recipe_id = self.get_recipe_ids()[0]
873
+ recipe = self.recipes.get(recipe_id=recipe_id)
874
+
875
+ # get ontology
876
+ if ontology_id is None:
877
+ ontology_id = recipe.ontology_ids[0]
878
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
879
+
880
+ # add labels to ontology
881
+ added_labels = ontology.add_labels(label_list=label_list, update_ontology=True)
882
+
883
+ return added_labels
884
+
885
+ def update_label(self, label_name, color=None, children=None, attributes=None, display_label=None, label=None,
886
+ recipe_id=None, ontology_id=None, upsert=False, icon_path=None):
887
+ """
888
+ Update a single label in the dataset (add it when `upsert=True` and it does not exist)
889
+
890
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
891
+
892
+ :param str label_name: str - label name
893
+ :param tuple color: color
894
+ :param children: children (sub labels)
895
+ :param list attributes: add attributes to the labels
896
+ :param str display_label: name that display label
897
+ :param dtlpy.entities.label.Label label: label
898
+ :param str recipe_id: optional recipe id
899
+ :param str ontology_id: optional ontology id
900
+ :param str icon_path: path to an image to be displayed on the label
901
+
902
+ :return: label entity
903
+ :rtype: dtlpy.entities.label.Label
904
+
905
+ **Example**:
906
+
907
+ .. code-block:: python
908
+
909
+ dataset.update_label(label_name='person', color=(34, 6, 231), attributes=['big', 'small'])
910
+ """
911
+ # get recipe
912
+
913
+ if recipe_id is None:
914
+ recipe_id = self.get_recipe_ids()[0]
915
+ recipe = self.recipes.get(recipe_id=recipe_id)
916
+
917
+ # get ontology
918
+ if ontology_id is None:
919
+ ontology_id = recipe.ontology_ids[0]
920
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
921
+
922
+ # add label
923
+ added_label = ontology.update_label(label_name=label_name,
924
+ color=color,
925
+ children=children,
926
+ attributes=attributes,
927
+ display_label=display_label,
928
+ label=label,
929
+ update_ontology=True,
930
+ upsert=upsert,
931
+ icon_path=icon_path)
932
+
933
+ return added_label
934
+
935
+ def update_labels(self, label_list, ontology_id=None, recipe_id=None, upsert=False):
936
+ """
937
+ Update labels in the dataset (add missing ones when `upsert=True`)
938
+
939
+ **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
940
+
941
+ :param list label_list: label list
942
+ :param str ontology_id: optional ontology id
943
+ :param str recipe_id: optional recipe id
944
+ :param bool upsert: if True, labels that do not exist will be added
945
+
946
+ :return: label entities
947
+ :rtype: dtlpy.entities.label.Label
948
+
949
+ **Example**:
950
+
951
+ .. code-block:: python
952
+
953
+ dataset.update_labels(label_list=label_list)
954
+ """
955
+ # get recipe
956
+ if recipe_id is None:
957
+ recipe_id = self.get_recipe_ids()[0]
958
+ recipe = self.recipes.get(recipe_id=recipe_id)
959
+
960
+ # get ontology
961
+ if ontology_id is None:
962
+ ontology_id = recipe.ontology_ids[0]
963
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
964
+
965
+ # add labels to ontology
966
+ added_labels = ontology.update_labels(label_list=label_list, update_ontology=True, upsert=upsert)
967
+
968
+ return added_labels
969
+
970
+ def download(
971
+ self,
972
+ filters=None,
973
+ local_path=None,
974
+ file_types=None,
975
+ annotation_options: ViewAnnotationOptions = None,
976
+ annotation_filters=None,
977
+ overwrite=False,
978
+ to_items_folder=True,
979
+ thickness=1,
980
+ with_text=False,
981
+ without_relative_path=None,
982
+ alpha=1,
983
+ export_version=ExportVersion.V1,
984
+ dataset_lock=False,
985
+ lock_timeout_sec=None,
986
+ export_summary=False,
987
+ ):
988
+ """
989
+ Download dataset by filters.
990
+ Filter the dataset for items and save them locally.
991
+ Optional - also download the item's annotations, mask, instance mask, and image mask.
992
+
993
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
994
+
995
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
996
+ :param str local_path: local folder or filename to save to.
997
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
998
+ :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
999
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1000
+ :param bool overwrite: optional - default = False to overwrite the existing files
1001
+ :param bool dataset_lock: optional - default = False to make dataset readonly during the download
1002
+ :param bool export_summary: optional - default = False to get the summary of the export
1003
+ :param int lock_timeout_sec: optional - Set lock timeout for the export
1004
+ :param bool to_items_folder: Create 'items' folder and download items to it
1005
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1006
+ :param bool with_text: optional - add text to annotations, default = False
1007
+ :param bool without_relative_path: bool - download items without the relative path from platform
1008
+ :param float alpha: opacity value [0 1], default 1
1009
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1010
+ :return: `List` of local_path per each downloaded item
1011
+
1012
+ **Example**:
1013
+
1014
+ .. code-block:: python
1015
+
1016
+ dataset.download(local_path='local_path',
1017
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1018
+ overwrite=False,
1019
+ thickness=1,
1020
+ with_text=False,
1021
+ alpha=1,
1022
+ dataset_lock=False,
1023
+ lock_timeout_sec=300,
1024
+ export_summary=False
1025
+ )
1026
+ """
1027
+ return self.items.download(filters=filters,
1028
+ local_path=local_path,
1029
+ file_types=file_types,
1030
+ annotation_options=annotation_options,
1031
+ annotation_filters=annotation_filters,
1032
+ overwrite=overwrite,
1033
+ to_items_folder=to_items_folder,
1034
+ thickness=thickness,
1035
+ with_text=with_text,
1036
+ without_relative_path=without_relative_path,
1037
+ alpha=alpha,
1038
+ export_version=export_version,
1039
+ dataset_lock=dataset_lock,
1040
+ lock_timeout_sec=lock_timeout_sec,
1041
+ export_summary=export_summary
1042
+ )
1043
+
1044
+ def download_folder(
1045
+ self,
1046
+ folder_path,
1047
+ filters=None,
1048
+ local_path=None,
1049
+ file_types=None,
1050
+ annotation_options: ViewAnnotationOptions = None,
1051
+ annotation_filters=None,
1052
+ overwrite=False,
1053
+ to_items_folder=True,
1054
+ thickness=1,
1055
+ with_text=False,
1056
+ without_relative_path=None,
1057
+ alpha=1,
1058
+ export_version=ExportVersion.V1,
1059
+ dataset_lock=False,
1060
+ lock_timeout_sec=None,
1061
+ export_summary=False,
1062
+ ):
1063
+ """
1064
+ Download dataset folder.
1065
+ Optional - also download annotation, mask, instance and image mask of the item
1066
+
1067
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
1068
+
1069
+ :param str folder_path: the path of the folder to download
1070
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
1071
+ :param str local_path: local folder or filename to save to.
1072
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
1073
+ :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
1074
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1075
+ :param bool overwrite: optional - default = False to overwrite the existing files
1076
+ :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
1077
+ :param bool export_summary: optional - default = False to get the summary of the export
1078
+ :param int lock_timeout_sec: optional - Set lock timeout for the export
1079
+ :param bool to_items_folder: Create 'items' folder and download items to it
1080
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1081
+ :param bool with_text: optional - add text to annotations, default = False
1082
+ :param bool without_relative_path: bool - download items without the relative path from platform
1083
+ :param float alpha: opacity value [0 1], default 1
1084
+ :param str export_version: `V2` - exported items will have original extension in filename, `V1` - no original extension in filenames
1085
+ :return: `List` of local_path per each downloaded item
1086
+
1087
+ **Example**:
1088
+
1089
+ .. code-block:: python
1090
+
1091
+ dataset.download_folder(folder_path='folder_path',
1092
+ local_path='local_path',
1093
+ annotation_options=[dl.ViewAnnotationOptions.JSON, dl.ViewAnnotationOptions.MASK],
1094
+ overwrite=False,
1095
+ thickness=1,
1096
+ with_text=False,
1097
+ alpha=1,
1098
+ dataset_lock=False,
1100
+ lock_timeout_sec=300,
1101
+ export_summary=False
1102
+ )
1103
+ """
1104
+ filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
1105
+ return self.items.download(filters=filters,
1106
+ local_path=local_path,
1107
+ file_types=file_types,
1108
+ annotation_options=annotation_options,
1109
+ annotation_filters=annotation_filters,
1110
+ overwrite=overwrite,
1111
+ to_items_folder=to_items_folder,
1112
+ thickness=thickness,
1113
+ with_text=with_text,
1114
+ without_relative_path=without_relative_path,
1115
+ alpha=alpha,
1116
+ export_version=export_version,
1117
+ dataset_lock=dataset_lock,
1118
+ lock_timeout_sec=lock_timeout_sec,
1119
+ export_summary=export_summary
1120
+ )
1121
+
1122
+ def delete_labels(self, label_names):
1123
+ """
1124
+ Delete labels from dataset's ontologies
1125
+
1126
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
1127
+
1128
+ :param label_names: label object/ label name / list of label objects / list of label names
1129
+
1130
+ **Example**:
1131
+
1132
+ .. code-block:: python
1133
+
1134
+ dataset.delete_labels(label_names=['myLabel1', 'Mylabel2'])
1135
+ """
1136
+ for recipe in self.recipes.list():
1137
+ for ontology in recipe.ontologies.list():
1138
+ ontology.delete_labels(label_names=label_names)
1139
+ self._labels = None
1140
+
1141
+ def update_attributes(self,
1142
+ title: str,
1143
+ key: str,
1144
+ attribute_type,
1145
+ recipe_id: str = None,
1146
+ ontology_id: str = None,
1147
+ scope: list = None,
1148
+ optional: bool = None,
1149
+ values: list = None,
1150
+ attribute_range=None):
1151
+ """
1152
+ Add a new attribute, or update it if it already exists
1153
+
1154
+ :param str ontology_id: ontology_id
1155
+ :param str title: attribute title
1156
+ :param str key: the key of the attribute, must be unique
1157
+ :param AttributesTypes attribute_type: dl.AttributesTypes your attribute type
1158
+ :param list scope: list of the labels or * for all labels
1159
+ :param bool optional: optional attribute
1160
+ :param list values: list of the attribute values ( for checkbox and radio button)
1161
+ :param dict or AttributesRange attribute_range: dl.AttributesRange object
1162
+ :return: True if successful
1163
+ :rtype: bool
1164
+
1165
+ **Example**:
1166
+
1167
+ .. code-block:: python
1168
+
1169
+ dataset.update_attributes(ontology_id='ontology_id',
1170
+ key='1',
1171
+ title='checkbox',
1172
+ attribute_type=dl.AttributesTypes.CHECKBOX,
1173
+ values=[1,2,3])
1174
+ """
1175
+ # get recipe
1176
+ if recipe_id is None:
1177
+ recipe_id = self.get_recipe_ids()[0]
1178
+ recipe = self.recipes.get(recipe_id=recipe_id)
1179
+
1180
+ # get ontology
1181
+ if ontology_id is None:
1182
+ ontology_id = recipe.ontology_ids[0]
1183
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
1184
+
1185
+ # add attribute to ontology
1186
+ attribute = ontology.update_attributes(
1187
+ title=title,
1188
+ key=key,
1189
+ attribute_type=attribute_type,
1190
+ scope=scope,
1191
+ optional=optional,
1192
+ values=values,
1193
+ attribute_range=attribute_range)
1194
+
1195
+ return attribute
1196
+
1197
+ def delete_attributes(self, keys: list,
1198
+ recipe_id: str = None,
1199
+ ontology_id: str = None):
1200
+ """
1201
+ Delete a bulk of attributes
1202
+
1203
+ :param str recipe_id: recipe id
1204
+ :param str ontology_id: ontology id
1205
+ :param list keys: Keys of attributes to delete
1206
+ :return: True if success
1207
+ :rtype: bool
1208
+ """
1209
+
1210
+ # get recipe
1211
+ if recipe_id is None:
1212
+ recipe_id = self.get_recipe_ids()[0]
1213
+ recipe = self.recipes.get(recipe_id=recipe_id)
1214
+
1215
+ # get ontology
1216
+ if ontology_id is None:
1217
+ ontology_id = recipe.ontology_ids[0]
1218
+ ontology = recipe.ontologies.get(ontology_id=ontology_id)
1219
+ return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)
1220
+
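`delete_attributes` carries no usage example; a minimal sketch (the attribute keys are illustrative):

.. code-block:: python

    # falls back to the dataset's first recipe and ontology, as in the code above
    success = dataset.delete_attributes(keys=['color', 'size'])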
1221
+ def split_ml_subsets(self,
1222
+ items_query=None,
1223
+ percentages: dict = None):
1224
+ """
1225
+ Split dataset items into ML subsets.
1226
+
1227
+ :param dl.Filters items_query: Filters object to select items.
1228
+ :param dict percentages: {'train': x, 'validation': y, 'test': z}.
1229
+ :return: True if the split operation was successful.
1230
+ :rtype: bool
1231
+ """
1232
+ return self.datasets.split_ml_subsets(dataset_id=self.id,
1233
+ items_query=items_query,
1234
+ ml_split_list=percentages)
1235
+
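A minimal sketch for `split_ml_subsets` (an 80/10/10 split, assuming the percentages are expressed as values summing to 100):

.. code-block:: python

    success = dataset.split_ml_subsets(
        percentages={'train': 80, 'validation': 10, 'test': 10})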
1236
+ def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
1237
+ """
1238
+ Assign a specific ML subset (train/validation/test) to items defined by the given filters.
1239
+ This will set the chosen subset to True and the others to None.
1240
+
1241
+ :param dl.Filters items_query: Filters to select items
1242
+ :param str subset: 'train', 'validation', or 'test'
1243
+ :return: True if successful
1244
+ :rtype: bool
1245
+ """
1246
+
1247
+ return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
1248
+ items_query=items_query,
1249
+ subset=subset)
1250
+
1251
+ def remove_subset_from_items(self, items_query=None) -> bool:
1252
+ """
1253
+ Remove any ML subset assignment from items defined by the given filters.
1254
+ This sets train, validation, and test tags to None.
1255
+
1256
+ :param dl.Filters items_query: Filters to select items
1257
+ :return: True if successful
1258
+ :rtype: bool
1259
+ """
1260
+ return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
1261
+ items_query=items_query,
1262
+ subset=None,
1263
+ deleteTag=True)
1264
+
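Neither `assign_subset_to_items` nor `remove_subset_from_items` includes an example; a minimal sketch (the '/train' directory filter is illustrative):

.. code-block:: python

    filters = dl.Filters(field='dir', values='/train')
    dataset.assign_subset_to_items(subset='train', items_query=filters)
    # later, clear any subset assignment from the same items
    dataset.remove_subset_from_items(items_query=filters)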
1265
+ def get_items_missing_ml_subset(self, filters=None) -> list:
1266
+ """
1267
+ Get the list of item IDs that are missing ML subset assignment.
1268
+ An item is considered to be missing an ML subset when none of its train, validation, or test tags is True (all are None).
1269
+
1270
+ :param dl.Filters filters: optional filters to narrow down items. If None, will use a default filter for files.
1271
+ :return: list of item IDs
1272
+ :rtype: list
1273
+ """
1274
+ if filters is None:
1275
+ filters = entities.Filters()
1276
+ filters.add(field='metadata.system.tags.train', values=None)
1277
+ filters.add(field='metadata.system.tags.validation', values=None)
1278
+ filters.add(field='metadata.system.tags.test', values=None)
1279
+ missing_ids = []
1280
+ pages = self.items.list(filters=filters)
1281
+ for page in pages:
1282
+ for item in page:
1283
+ # items that pass the filters have no subset assigned
1284
+ missing_ids.append(item.id)
1285
+ return missing_ids
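A minimal usage sketch for the method above:

.. code-block:: python

    missing = dataset.get_items_missing_ml_subset()
    print('{} items have no ML subset assigned'.format(len(missing)))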