dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
@@ -1,915 +1,909 @@
1
- import logging
2
-
3
- from .. import entities, exceptions, repositories, miscellaneous, _api_reference
4
- from ..services.api_client import ApiClient
5
-
6
- logger = logging.getLogger(name='dtlpy')
7
-
8
-
9
- class Items:
10
- """
11
- Items Repository
12
-
13
- The Items class allows you to manage items in your datasets.
14
- For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
15
- """
16
-
17
- def __init__(self,
18
- client_api: ApiClient,
19
- datasets: repositories.Datasets = None,
20
- dataset: entities.Dataset = None,
21
- dataset_id=None,
22
- items_entity=None,
23
- project=None):
24
- self._client_api = client_api
25
- self._dataset = dataset
26
- self._dataset_id = dataset_id
27
- self._datasets = datasets
28
- self._project = project
29
- # set items entity to represent the item (Item, Codebase, Artifact etc...)
30
- if items_entity is None:
31
- self.items_entity = entities.Item
32
- if self._dataset_id is None and self._dataset is not None:
33
- self._dataset_id = self._dataset.id
34
-
35
- ############
36
- # entities #
37
- ############
38
- @property
39
- def dataset(self) -> entities.Dataset:
40
- if self._dataset is None:
41
- if self._dataset_id is None:
42
- raise exceptions.PlatformException(
43
- error='400',
44
- message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
45
- self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
46
- assert isinstance(self._dataset, entities.Dataset)
47
- return self._dataset
48
-
49
- @dataset.setter
50
- def dataset(self, dataset: entities.Dataset):
51
- if not isinstance(dataset, entities.Dataset):
52
- raise ValueError('Must input a valid Dataset entity')
53
- self._dataset = dataset
54
-
55
- @property
56
- def project(self) -> entities.Project:
57
- if self._project is None:
58
- raise exceptions.PlatformException(
59
- error='400',
60
- message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
61
- assert isinstance(self._dataset, entities.Dataset)
62
- return self._project
63
-
64
- @project.setter
65
- def project(self, project: entities.Project):
66
- if not isinstance(project, entities.Project):
67
- raise ValueError('Must input a valid Dataset entity')
68
- self._project = project
69
-
70
- ################
71
- # repositories #
72
- ################
73
- @property
74
- def datasets(self) -> repositories.Datasets:
75
- if self._datasets is None:
76
- self._datasets = repositories.Datasets(client_api=self._client_api)
77
- assert isinstance(self._datasets, repositories.Datasets)
78
- return self._datasets
79
-
80
- ###########
81
- # methods #
82
- ###########
83
-
84
- def set_items_entity(self, entity):
85
- """
86
- Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
87
-
88
- :param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
89
- """
90
- if entity in [entities.Item, entities.Artifact, entities.Codebase]:
91
- self.items_entity = entity
92
- else:
93
- raise exceptions.PlatformException(error="403",
94
- message="Unable to set given entity. Entity give: {}".format(entity))
95
-
96
- def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
97
- """
98
- Get all items in dataset.
99
-
100
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
101
-
102
- :param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
103
- :return: list of all items
104
- :rtype: list
105
-
106
- **Example**:
107
-
108
- .. code-block:: python
109
-
110
- dataset.items.get_all_items()
111
-
112
- """
113
- if filters is None:
114
- filters = entities.Filters()
115
- filters._user_query = 'false'
116
- filters.add(field='type', values='file')
117
- pages = self.list(filters=filters)
118
- num_items = pages.items_count
119
- items = [None for _ in range(num_items)]
120
- for i_item, item in enumerate(pages.all()):
121
- items[i_item] = item
122
- items = [item for item in items if item is not None]
123
- return items
124
-
125
- def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
126
- pool = self._client_api.thread_pools(pool_name='entity.create')
127
- jobs = [None for _ in range(len(response_items))]
128
- # return triggers list
129
- for i_item, item in enumerate(response_items):
130
- jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
131
- **{'client_api': self._client_api,
132
- '_json': item,
133
- 'dataset': self.dataset})
134
- # get all results
135
- results = [j.result() for j in jobs]
136
- # log errors
137
- _ = [logger.warning(r[1]) for r in results if r[0] is False]
138
- # return good jobs
139
- items = miscellaneous.List([r[1] for r in results if r[0] is True])
140
- return items
141
-
142
- def _list(self, filters: entities.Filters):
143
- """
144
- Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
145
- user is expected to perform another request then for every folder item to actually get the its item list.
146
-
147
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
148
- :return: json response
149
- """
150
- # prepare request
151
- success, response = self._client_api.gen_request(req_type="POST",
152
- path="/datasets/{}/query".format(self.dataset.id),
153
- json_req=filters.prepare(),
154
- headers={'user_query': filters._user_query})
155
- if not success:
156
- raise exceptions.PlatformException(response)
157
- return response.json()
158
-
159
- @_api_reference.add(path='/datasets/{id}/query', method='post')
160
- def list(self,
161
- filters: entities.Filters = None,
162
- page_offset: int = None,
163
- page_size: int = None
164
- ) -> entities.PagedEntities:
165
- """
166
- List items in a dataset.
167
-
168
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
169
-
170
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
171
- :param int page_offset: start page
172
- :param int page_size: page size
173
- :return: Pages object
174
- :rtype: dtlpy.entities.paged_entities.PagedEntities
175
-
176
- **Example**:
177
-
178
- .. code-block:: python
179
-
180
- dataset.items.list(page_offset=0, page_size=100)
181
- """
182
- # default filters
183
- if filters is None:
184
- filters = entities.Filters()
185
- filters._user_query = 'false'
186
- # assert type filters
187
- elif not isinstance(filters, entities.Filters):
188
- raise exceptions.PlatformException(error='400',
189
- message='Unknown filters type: {!r}'.format(type(filters)))
190
- if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
191
- raise exceptions.PlatformException(
192
- error='400',
193
- message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
194
-
195
- # page size
196
- if page_size is None:
197
- # take from default
198
- page_size = filters.page_size
199
- else:
200
- filters.page_size = page_size
201
-
202
- # page offset
203
- if page_offset is None:
204
- # take from default
205
- page_offset = filters.page
206
- else:
207
- filters.page = page_offset
208
-
209
- if filters.resource == entities.FiltersResource.ITEM:
210
- items_repository = self
211
- else:
212
- items_repository = repositories.Annotations(client_api=self._client_api,
213
- dataset=self._dataset)
214
-
215
- paged = entities.PagedEntities(items_repository=items_repository,
216
- filters=filters,
217
- page_offset=page_offset,
218
- page_size=page_size,
219
- client_api=self._client_api)
220
- paged.get_page()
221
- return paged
222
-
223
- @_api_reference.add(path='/items/{id}', method='get')
224
- def get(self,
225
- filepath: str = None,
226
- item_id: str = None,
227
- fetch: bool = None,
228
- is_dir: bool = False
229
- ) -> entities.Item:
230
- """
231
- Get Item object
232
-
233
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
234
-
235
- :param str filepath: optional - search by remote path
236
- :param str item_id: optional - search by id
237
- :param bool fetch: optional - fetch entity from platform, default taken from cookie
238
- :param bool is_dir: True if you want to get an item from dir type
239
- :return: Item object
240
- :rtype: dtlpy.entities.item.Item
241
-
242
- **Example**:
243
-
244
- .. code-block:: python
245
-
246
- dataset.items.get(item_id='item_id')
247
- """
248
- if fetch is None:
249
- fetch = self._client_api.fetch_entities
250
-
251
- if fetch:
252
- if item_id is not None:
253
- success, response = self._client_api.gen_request(req_type="get",
254
- path="/items/{}".format(item_id))
255
- if success:
256
- item = self.items_entity.from_json(client_api=self._client_api,
257
- _json=response.json(),
258
- dataset=self._dataset,
259
- project=self._project)
260
- # verify input filepath is same as the given id
261
- if filepath is not None and item.filename != filepath:
262
- logger.warning(
263
- "Mismatch found in items.get: filepath is different then item.filename: "
264
- "{!r} != {!r}".format(
265
- filepath,
266
- item.filename))
267
- else:
268
- raise exceptions.PlatformException(response)
269
- elif filepath is not None:
270
- filters = entities.Filters()
271
- filters.pop(field='hidden')
272
- if is_dir:
273
- filters.add(field='type', values='dir')
274
- filters.recursive = False
275
- filters.add(field='filename', values=filepath)
276
- paged_entity = self.list(filters=filters)
277
- if len(paged_entity.items) == 0:
278
- raise exceptions.PlatformException(error='404',
279
- message='Item not found. filepath= "{}"'.format(filepath))
280
- elif len(paged_entity.items) > 1:
281
- raise exceptions.PlatformException(
282
- error='404',
283
- message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
284
- else:
285
- item = paged_entity.items[0]
286
- else:
287
- raise exceptions.PlatformException(error="400",
288
- message='Must choose by at least one. "filename" or "item_id"')
289
- else:
290
- item = entities.Item.from_json(_json={'id': item_id,
291
- 'filename': filepath},
292
- client_api=self._client_api,
293
- dataset=self._dataset,
294
- is_fetched=False,
295
- project=self._project)
296
- assert isinstance(item, entities.Item)
297
- return item
298
-
299
- @_api_reference.add(path='/items/{id}/clone', method='post')
300
- def clone(self,
301
- item_id: str,
302
- dst_dataset_id: str,
303
- remote_filepath: str = None,
304
- metadata: dict = None,
305
- with_annotations: bool = True,
306
- with_metadata: bool = True,
307
- with_task_annotations_status: bool = False,
308
- allow_many: bool = False,
309
- wait: bool = True):
310
- """
311
- Clone item. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.
312
-
313
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
314
-
315
- :param str item_id: item to clone
316
- :param str dst_dataset_id: destination dataset id
317
- :param str remote_filepath: complete filepath
318
- :param dict metadata: new metadata to add
319
- :param bool with_annotations: clone annotations
320
- :param bool with_metadata: clone metadata
321
- :param bool with_task_annotations_status: clone task annotations status
322
- :param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
323
- :param bool wait: wait for the command to finish
324
- :return: Item object
325
- :rtype: dtlpy.entities.item.Item
326
-
327
- **Example**:
328
-
329
- .. code-block:: python
330
-
331
- dataset.items.clone(item_id='item_id',
332
- dst_dataset_id='dist_dataset_id',
333
- with_metadata=True,
334
- with_task_annotations_status=False,
335
- with_annotations=False)
336
- """
337
- if metadata is None:
338
- metadata = dict()
339
- payload = {"targetDatasetId": dst_dataset_id,
340
- "remoteFileName": remote_filepath,
341
- "metadata": metadata,
342
- "cloneDatasetParams": {
343
- "withItemsAnnotations": with_annotations,
344
- "withMetadata": with_metadata,
345
- "withTaskAnnotationsStatus": with_task_annotations_status},
346
- "allowMany": allow_many
347
- }
348
- success, response = self._client_api.gen_request(req_type="post",
349
- path="/items/{}/clone".format(item_id),
350
- json_req=payload)
351
- # check response
352
- if not success:
353
- raise exceptions.PlatformException(response)
354
-
355
- command = entities.Command.from_json(_json=response.json(),
356
- client_api=self._client_api)
357
- if not wait:
358
- return command
359
- command = command.wait()
360
-
361
- if 'returnedModelId' not in command.spec:
362
- raise exceptions.PlatformException(error='400',
363
- message="returnedModelId key is missing in command response: {}"
364
- .format(response))
365
- cloned_item = self.get(item_id=command.spec['returnedModelId'][0])
366
- return cloned_item
367
-
368
- @_api_reference.add(path='/items/{id}', method='delete')
369
- def delete(self,
370
- filename: str = None,
371
- item_id: str = None,
372
- filters: entities.Filters = None):
373
- """
374
- Delete item from platform.
375
-
376
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
377
-
378
- You must provide at least ONE of the following params: item id, filename, filters.
379
-
380
- :param str filename: optional - search item by remote path
381
- :param str item_id: optional - search item by id
382
- :param dtlpy.entities.filters.Filters filters: optional - delete items by filter
383
- :return: True if success
384
- :rtype: bool
385
-
386
- **Example**:
387
-
388
- .. code-block:: python
389
-
390
- dataset.items.delete(item_id='item_id')
391
- """
392
- if item_id is not None:
393
- success, response = self._client_api.gen_request(req_type="delete",
394
- path="/items/{}".format(item_id),
395
- )
396
- elif filename is not None:
397
- if not filename.startswith("/"):
398
- filename = "/" + filename
399
- items = self.get(filepath=filename)
400
- if not isinstance(items, list):
401
- items = [items]
402
- if len(items) == 0:
403
- raise exceptions.PlatformException("404", "Item not found")
404
- elif len(items) > 1:
405
- raise exceptions.PlatformException(error="404", message="More the 1 item exist by the name provided")
406
- else:
407
- item_id = items[0].id
408
- success, response = self._client_api.gen_request(req_type="delete",
409
- path="/items/{}".format(item_id))
410
- elif filters is not None:
411
- # prepare request
412
- success, response = self._client_api.gen_request(req_type="POST",
413
- path="/datasets/{}/query".format(self.dataset.id),
414
- json_req=filters.prepare(operation='delete'))
415
- else:
416
- raise exceptions.PlatformException("400", "Must provide item id, filename or filters")
417
-
418
- # check response
419
- if success:
420
- logger.debug("Item/s deleted successfully")
421
- return success
422
- else:
423
- raise exceptions.PlatformException(response)
424
-
425
- @_api_reference.add(path='/items/{id}', method='patch')
426
- def update(self,
427
- item: entities.Item = None,
428
- filters: entities.Filters = None,
429
- update_values=None,
430
- system_update_values=None,
431
- system_metadata: bool = False):
432
- """
433
- Update item metadata.
434
-
435
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
436
-
437
- You must provide at least ONE of the following params: update_values, system_update_values.
438
-
439
- :param dtlpy.entities.item.Item item: Item object
440
- :param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
441
- :param update_values: optional field to be updated and new values
442
- :param system_update_values: values in system metadata to be updated
443
- :param bool system_metadata: True, if you want to update the metadata system
444
- :return: Item object
445
- :rtype: dtlpy.entities.item.Item
446
-
447
- **Example**:
448
-
449
- .. code-block:: python
450
-
451
- dataset.items.update(item='item_entity')
452
- """
453
- ref = filters is not None and (filters._ref_task or filters._ref_assignment)
454
-
455
- if system_update_values and not system_metadata:
456
- logger.warning('system metadata will not be updated because param system_metadata is False')
457
-
458
- # check params
459
- if item is None and filters is None:
460
- raise exceptions.PlatformException('400', 'must provide either item or filters')
461
-
462
- value_to_update = update_values or system_update_values
463
-
464
- if item is None and not ref and not value_to_update:
465
- raise exceptions.PlatformException('400',
466
- 'Must provide update_values or system_update_values')
467
-
468
- if item is not None and value_to_update:
469
- raise exceptions.PlatformException('400',
470
- 'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
471
- 'These parameters are intended only for bulk updates using filters.')
472
-
473
- # update item
474
- if item is not None:
475
- json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
476
- modified=item.to_json())
477
- if not json_req:
478
- return item
479
- url_path = "/items/{}".format(item.id)
480
- if system_metadata:
481
- url_path += "?system=true"
482
- success, response = self._client_api.gen_request(req_type="patch",
483
- path=url_path,
484
- json_req=json_req)
485
- if success:
486
- logger.debug("Item was updated successfully. Item id: {}".format(item.id))
487
- return self.items_entity.from_json(client_api=self._client_api,
488
- _json=response.json(),
489
- dataset=self._dataset)
490
- else:
491
- logger.error("Error while updating item")
492
- raise exceptions.PlatformException(response)
493
- # update by filters
494
- else:
495
- # prepare request
496
- prepared_filter = filters.prepare(operation='update',
497
- system_update=system_update_values,
498
- system_metadata=system_metadata,
499
- update=update_values)
500
- success, response = self._client_api.gen_request(req_type="POST",
501
- path="/datasets/{}/query".format(self.dataset.id),
502
- json_req=prepared_filter)
503
- if not success:
504
- raise exceptions.PlatformException(response)
505
- else:
506
- logger.debug("Items were updated successfully.")
507
- return response.json()
508
-
509
- def download(
510
- self,
511
- filters: entities.Filters = None,
512
- items=None,
513
- # download options
514
- local_path: str = None,
515
- file_types: list = None,
516
- save_locally: bool = True,
517
- to_array: bool = False,
518
- annotation_options: entities.ViewAnnotationOptions = None,
519
- annotation_filters: entities.Filters = None,
520
- overwrite: bool = False,
521
- to_items_folder: bool = True,
522
- thickness: int = 1,
523
- with_text: bool = False,
524
- without_relative_path=None,
525
- avoid_unnecessary_annotation_download: bool = False,
526
- include_annotations_in_output: bool = True,
527
- export_png_files: bool = False,
528
- filter_output_annotations: bool = False,
529
- alpha: float = 1,
530
- export_version=entities.ExportVersion.V1,
531
- dataset_lock: bool = False,
532
- lock_timeout_sec: int = None,
533
- export_summary: bool = False,
534
- ):
535
- """
536
- Download dataset items by filters.
537
-
538
- Filters the dataset for items and saves them locally.
539
-
540
- Optional -- download annotation, mask, instance, and image mask of the item.
541
-
542
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
543
-
544
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
545
- :param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
546
- :param str local_path: local folder or filename to save to.
547
- :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
548
- :param bool save_locally: bool. save to disk or return a buffer
549
- :param bool to_array: returns Ndarray when True and local_path = False
550
- :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
551
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
552
- :param bool overwrite: optional - default = False
553
- :param bool dataset_lock: optional - default = False
554
- :param bool export_summary: optional - default = False
555
- :param int lock_timeout_sec: optional
556
- :param bool to_items_folder: Create 'items' folder and download items to it
557
- :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
558
- :param bool with_text: optional - add text to annotations, default = False
559
- :param bool without_relative_path: bool - download items without the relative path from platform
560
- :param bool avoid_unnecessary_annotation_download: default - False
561
- :param bool include_annotations_in_output: default - False , if export should contain annotations
562
- :param bool export_png_files: default - if True, semantic annotations should be exported as png files
563
- :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
564
- :param float alpha: opacity value [0 1], default 1
565
- :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
566
- :return: generator of local_path per each downloaded item
567
- :rtype: generator or single item
568
-
569
- **Example**:
570
-
571
- .. code-block:: python
572
-
573
- dataset.items.download(local_path='local_path',
574
- annotation_options=dl.ViewAnnotationOptions,
575
- overwrite=False,
576
- thickness=1,
577
- with_text=False,
578
- alpha=1,
579
- save_locally=True
580
- )
581
- """
582
- downloader = repositories.Downloader(self)
583
- return downloader.download(
584
- filters=filters,
585
- items=items,
586
- local_path=local_path,
587
- file_types=file_types,
588
- save_locally=save_locally,
589
- to_array=to_array,
590
- annotation_options=annotation_options,
591
- annotation_filters=annotation_filters,
592
- overwrite=overwrite,
593
- to_items_folder=to_items_folder,
594
- thickness=thickness,
595
- alpha=alpha,
596
- with_text=with_text,
597
- without_relative_path=without_relative_path,
598
- avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
599
- include_annotations_in_output=include_annotations_in_output,
600
- export_png_files=export_png_files,
601
- filter_output_annotations=filter_output_annotations,
602
- export_version=export_version,
603
- dataset_lock=dataset_lock,
604
- lock_timeout_sec=lock_timeout_sec,
605
- export_summary=export_summary
606
- )
607
-
608
- def upload(
609
- self,
610
- # what to upload
611
- local_path: str,
612
- local_annotations_path: str = None,
613
- # upload options
614
- remote_path: str = "/",
615
- remote_name: str = None,
616
- file_types: list = None,
617
- overwrite: bool = False,
618
- item_metadata: dict = None,
619
- output_entity=entities.Item,
620
- no_output: bool = False,
621
- export_version: str = entities.ExportVersion.V1,
622
- item_description: str = None,
623
- raise_on_error: bool = False,
624
- return_as_list: bool = False
625
- ):
626
- """
627
- Upload local file to dataset.
628
- Local filesystem will remain unchanged.
629
- If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.
630
-
631
- **Prerequisites**: Any user can upload items.
632
-
633
- :param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
634
- :param str local_annotations_path: path to dataloop format annotations json files.
635
- :param str remote_path: remote path to save.
636
- :param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
637
- :param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
638
- :param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
639
- :param bool overwrite: optional - default = False
640
- :param output_entity: output type
641
- :param bool no_output: do not return the items after upload
642
- :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
643
- :param str item_description: add a string description to the uploaded item
644
- :param bool raise_on_error: raise an exception if an error occurs
645
- :param bool return_as_list: return a list of items instead of a generator
646
-
647
- :return: Output (generator/single item)
648
- :rtype: generator or single item
649
-
650
- **Example**:
651
-
652
- .. code-block:: python
653
-
654
- dataset.items.upload(local_path='local_path',
655
- local_annotations_path='local_annotations_path',
656
- overwrite=True,
657
- item_metadata={'Hellow': 'Word'}
658
- )
659
- """
660
- # initiate and use uploader
661
- uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
662
- return uploader.upload(
663
- local_path=local_path,
664
- local_annotations_path=local_annotations_path,
665
- # upload options
666
- remote_path=remote_path,
667
- remote_name=remote_name,
668
- file_types=file_types,
669
- # config
670
- overwrite=overwrite,
671
- # metadata to upload with items
672
- item_metadata=item_metadata,
673
- export_version=export_version,
674
- item_description=item_description,
675
- raise_on_error=raise_on_error,
676
- return_as_list=return_as_list
677
- )
678
-
679
- @property
680
- def platform_url(self):
681
- return self._client_api._get_resource_url(
682
- "projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id))
683
-
684
- def open_in_web(self, filepath=None, item_id=None, item=None):
685
- """
686
- Open the item in web platform
687
-
688
- **Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.
689
-
690
- :param str filepath: item file path
691
- :param str item_id: item id
692
- :param dtlpy.entities.item.Item item: item entity
693
-
694
- **Example**:
695
-
696
- .. code-block:: python
697
-
698
- dataset.items.open_in_web(item_id='item_id')
699
-
700
- """
701
- if filepath is not None:
702
- item = self.get(filepath=filepath)
703
- if item is not None:
704
- item.open_in_web()
705
- elif item_id is not None:
706
- self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
707
- else:
708
- self._client_api._open_in_web(url=self.platform_url)
709
-
710
- def update_status(self,
711
- status: entities.ItemStatus,
712
- items=None,
713
- item_ids=None,
714
- filters=None,
715
- dataset=None,
716
- clear=False):
717
- """
718
- Update item status in task
719
-
720
- **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.
721
-
722
- You must provide at least ONE of the following params: items, item_ids, filters.
723
-
724
- :param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
725
- :param list items: list of items
726
- :param list item_ids: list of items id
727
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
728
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
729
- :param bool clear: to delete status
730
-
731
- **Example**:
732
-
733
- .. code-block:: python
734
-
735
- dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)
736
-
737
- """
738
- if items is None and item_ids is None and filters is None:
739
- raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')
740
-
741
- if self._dataset is None and dataset is None:
742
- raise exceptions.PlatformException('400', 'Please provide dataset')
743
- elif dataset is None:
744
- dataset = self._dataset
745
-
746
- if filters is not None:
747
- items = dataset.items.list(filters=filters)
748
- item_count = items.items_count
749
- elif items is not None:
750
- if isinstance(items, entities.PagedEntities):
751
- item_count = items.items_count
752
- else:
753
- if not isinstance(items, list):
754
- items = [items]
755
- item_count = len(items)
756
- items = [items]
757
- else:
758
- if not isinstance(item_ids, list):
759
- item_ids = [item_ids]
760
- item_count = len(item_ids)
761
- items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]
762
-
763
- pool = self._client_api.thread_pools(pool_name='item.status_update')
764
- jobs = [None for _ in range(item_count)]
765
- # call multiprocess wrapper to run service on each item in list
766
- for page in items:
767
- for i_item, item in enumerate(page):
768
- jobs[i_item] = pool.submit(item.update_status,
769
- **{'status': status,
770
- 'clear': clear})
771
-
772
- # get all results
773
- results = [j.result() for j in jobs]
774
- out_success = [r for r in results if r is True]
775
- out_errors = [r for r in results if r is False]
776
- if len(out_errors) == 0:
777
- logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
778
- else:
779
- logger.error(out_errors)
780
- logger.error('Item/s updated with {} errors'.format(len(out_errors)))
781
-
782
- def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
783
- """
784
- Create a directory in a dataset.
785
-
786
- **Prerequisites**: All users.
787
-
788
- :param str directory: name of directory
789
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
790
- :return: Item object
791
- :rtype: dtlpy.entities.item.Item
792
-
793
- **Example**:
794
-
795
- .. code-block:: python
796
-
797
- dataset.items.make_dir(directory='directory_name')
798
- """
799
- if self._dataset_id is None and dataset is None:
800
- raise exceptions.PlatformException('400', 'Please provide parameter dataset')
801
-
802
- payload = {
803
- 'type': 'dir',
804
- 'path': directory
805
- }
806
- headers = {'content-type': 'application/x-www-form-urlencoded'}
807
- success, response = self._client_api.gen_request(req_type="post",
808
- headers=headers,
809
- path="/datasets/{}/items".format(self._dataset_id),
810
- data=payload)
811
- if success:
812
- item = self.items_entity.from_json(client_api=self._client_api,
813
- _json=response.json(),
814
- dataset=self._dataset)
815
- else:
816
- raise exceptions.PlatformException(response)
817
-
818
- return item
819
-
820
- def move_items(self,
821
- destination: str,
822
- filters: entities.Filters = None,
823
- items=None,
824
- dataset: entities.Dataset = None
825
- ) -> bool:
826
- """
827
- Move items to another directory.
828
- If directory does not exist we will create it
829
-
830
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
831
-
832
- :param str destination: destination directory
833
- :param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
834
- :param items: optional - either this or filters. A list of items to move
835
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
836
- :return: True if success
837
- :rtype: bool
838
-
839
- **Example**:
840
-
841
- .. code-block:: python
842
-
843
- dataset.items.move_items(destination='directory_name')
844
- """
845
- if filters is None and items is None:
846
- raise exceptions.PlatformException('400', 'Must provide either filters or items')
847
-
848
- dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
849
- dest_dir_filter.recursive = False
850
- dest_dir_filter.add(field='filename', values=destination)
851
- dirs_page = self.list(filters=dest_dir_filter)
852
-
853
- if dirs_page.items_count == 0:
854
- directory = self.make_dir(directory=destination, dataset=dataset)
855
- elif dirs_page.items_count == 1:
856
- directory = dirs_page.items[0]
857
- else:
858
- raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))
859
-
860
- if filters is not None:
861
- items = self.list(filters=filters)
862
- elif isinstance(items, list):
863
- items = [items]
864
- elif not isinstance(items, entities.PagedEntities):
865
- raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
866
- type(items)))
867
-
868
- item_ids = list()
869
- for page in items:
870
- for item in page:
871
- item_ids.append(item.id)
872
-
873
- success, response = self._client_api.gen_request(req_type="put",
874
- path="/datasets/{}/items/{}".format(self._dataset_id,
875
- directory.id),
876
- json_req=item_ids)
877
- if not success:
878
- raise exceptions.PlatformException(response)
879
-
880
- return success
881
-
882
- def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
883
- """
884
- Get item score
885
-
886
- **Prerequisites**: You must be able to read the task
887
-
888
- :param str item_id: item id
889
- :param str task_id: task id
890
- :param int page_offset: start page
891
- :param int page_size: page size
892
- :return: page of item scores
893
-
894
- **Example**:
895
-
896
- .. code-block:: python
897
-
898
- dataset.items.item_score(item_id='item_id', task_id='task_id')
899
-
900
- """
901
-
902
- if item_id is None:
903
- raise exceptions.PlatformException('400', 'Must provide item id')
904
-
905
- if task_id is None:
906
- raise exceptions.PlatformException('400', 'Must provide task id')
907
-
908
- success, response = self._client_api.gen_request(req_type="get",
909
- path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
910
- .format(task_id, item_id, page_offset, page_size))
911
- if success:
912
- return response.json()
913
- else:
914
- raise exceptions.PlatformException(response)
915
-
1
+ import logging
2
+
3
+ from .. import entities, exceptions, repositories, miscellaneous, _api_reference
4
+ from ..services.api_client import ApiClient
5
+
6
+ logger = logging.getLogger(name='dtlpy')
7
+
8
+
9
+ class Items:
10
+ """
11
+ Items Repository
12
+
13
+ The Items class allows you to manage items in your datasets.
14
+ For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
15
+ """
16
+
17
+ def __init__(self,
18
+ client_api: ApiClient,
19
+ datasets: repositories.Datasets = None,
20
+ dataset: entities.Dataset = None,
21
+ dataset_id=None,
22
+ items_entity=None,
23
+ project=None):
24
+ self._client_api = client_api
25
+ self._dataset = dataset
26
+ self._dataset_id = dataset_id
27
+ self._datasets = datasets
28
+ self._project = project
29
+ # set items entity to represent the item (Item, Codebase, Artifact etc...)
30
+ if items_entity is None:
31
+ self.items_entity = entities.Item
32
+ if self._dataset_id is None and self._dataset is not None:
33
+ self._dataset_id = self._dataset.id
34
+
35
+ ############
36
+ # entities #
37
+ ############
38
+ @property
39
+ def dataset(self) -> entities.Dataset:
40
+ if self._dataset is None:
41
+ if self._dataset_id is None:
42
+ raise exceptions.PlatformException(
43
+ error='400',
44
+ message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
45
+ self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
46
+ assert isinstance(self._dataset, entities.Dataset)
47
+ return self._dataset
48
+
49
+ @dataset.setter
50
+ def dataset(self, dataset: entities.Dataset):
51
+ if not isinstance(dataset, entities.Dataset):
52
+ raise ValueError('Must input a valid Dataset entity')
53
+ self._dataset = dataset
54
+
55
+ @property
56
+ def project(self) -> entities.Project:
57
+ if self._project is None:
58
+ raise exceptions.PlatformException(
59
+ error='400',
60
+ message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
61
+ assert isinstance(self._dataset, entities.Dataset)
62
+ return self._project
63
+
64
+ @project.setter
65
+ def project(self, project: entities.Project):
66
+ if not isinstance(project, entities.Project):
67
+ raise ValueError('Must input a valid Dataset entity')
68
+ self._project = project
69
+
70
+ ################
71
+ # repositories #
72
+ ################
73
+ @property
74
+ def datasets(self) -> repositories.Datasets:
75
+ if self._datasets is None:
76
+ self._datasets = repositories.Datasets(client_api=self._client_api)
77
+ assert isinstance(self._datasets, repositories.Datasets)
78
+ return self._datasets
79
+
80
+ ###########
81
+ # methods #
82
+ ###########
83
+
84
+ def set_items_entity(self, entity):
85
+ """
86
+ Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
87
+
88
+ :param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
89
+ """
90
+ if entity in [entities.Item, entities.Artifact, entities.Codebase]:
91
+ self.items_entity = entity
92
+ else:
93
+ raise exceptions.PlatformException(error="403",
94
+ message="Unable to set given entity. Entity give: {}".format(entity))
95
+
96
+ def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
97
+ """
98
+ Get all items in dataset.
99
+
100
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
101
+
102
+ :param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
103
+ :return: list of all items
104
+ :rtype: list
105
+
106
+ **Example**:
107
+
108
+ .. code-block:: python
109
+
110
+ dataset.items.get_all_items()
111
+
112
+ """
113
+ if filters is None:
114
+ filters = entities.Filters()
115
+ filters._user_query = 'false'
116
+ filters.add(field='type', values='file')
117
+ pages = self.list(filters=filters)
118
+ num_items = pages.items_count
119
+ items = [None for _ in range(num_items)]
120
+ for i_item, item in enumerate(pages.all()):
121
+ items[i_item] = item
122
+ items = [item for item in items if item is not None]
123
+ return items
124
+
125
+ def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
126
+ pool = self._client_api.thread_pools(pool_name='entity.create')
127
+ jobs = [None for _ in range(len(response_items))]
128
+ # return triggers list
129
+ for i_item, item in enumerate(response_items):
130
+ jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
131
+ **{'client_api': self._client_api,
132
+ '_json': item,
133
+ 'dataset': self.dataset})
134
+ # get all results
135
+ results = [j.result() for j in jobs]
136
+ # log errors
137
+ _ = [logger.warning(r[1]) for r in results if r[0] is False]
138
+ # return good jobs
139
+ items = miscellaneous.List([r[1] for r in results if r[0] is True])
140
+ return items
141
+
142
+ def _list(self, filters: entities.Filters):
143
+ """
144
+ Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
145
+ user is expected to perform another request then for every folder item to actually get the its item list.
146
+
147
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
148
+ :return: json response
149
+ """
150
+ # prepare request
151
+ success, response = self._client_api.gen_request(req_type="POST",
152
+ path="/datasets/{}/query".format(self.dataset.id),
153
+ json_req=filters.prepare(),
154
+ headers={'user_query': filters._user_query})
155
+ if not success:
156
+ raise exceptions.PlatformException(response)
157
+ return response.json()
158
+
159
+ @_api_reference.add(path='/datasets/{id}/query', method='post')
160
+ def list(self,
161
+ filters: entities.Filters = None,
162
+ page_offset: int = None,
163
+ page_size: int = None
164
+ ) -> entities.PagedEntities:
165
+ """
166
+ List items in a dataset.
167
+
168
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
169
+
170
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
171
+ :param int page_offset: start page
172
+ :param int page_size: page size
173
+ :return: Pages object
174
+ :rtype: dtlpy.entities.paged_entities.PagedEntities
175
+
176
+ **Example**:
177
+
178
+ .. code-block:: python
179
+
180
+ dataset.items.list(page_offset=0, page_size=100)
181
+ """
182
+ # default filters
183
+ if filters is None:
184
+ filters = entities.Filters()
185
+ filters._user_query = 'false'
186
+ # assert type filters
187
+ elif not isinstance(filters, entities.Filters):
188
+ raise exceptions.PlatformException(error='400',
189
+ message='Unknown filters type: {!r}'.format(type(filters)))
190
+ if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
191
+ raise exceptions.PlatformException(
192
+ error='400',
193
+ message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
194
+
195
+ # page size
196
+ if page_size is not None:
197
+ filters.page_size = page_size
198
+
199
+ # page offset
200
+ if page_offset is not None:
201
+ filters.page = page_offset
202
+
203
+ if filters.resource == entities.FiltersResource.ITEM:
204
+ items_repository = self
205
+ else:
206
+ items_repository = repositories.Annotations(client_api=self._client_api,
207
+ dataset=self._dataset)
208
+
209
+ paged = entities.PagedEntities(items_repository=items_repository,
210
+ filters=filters,
211
+ page_offset=filters.page,
212
+ page_size=filters.page_size,
213
+ client_api=self._client_api)
214
+ paged.get_page()
215
+ return paged
216
+
217
+ @_api_reference.add(path='/items/{id}', method='get')
218
+ def get(self,
219
+ filepath: str = None,
220
+ item_id: str = None,
221
+ fetch: bool = None,
222
+ is_dir: bool = False
223
+ ) -> entities.Item:
224
+ """
225
+ Get Item object
226
+
227
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
228
+
229
+ :param str filepath: optional - search by remote path
230
+ :param str item_id: optional - search by id
231
+ :param bool fetch: optional - fetch entity from platform, default taken from cookie
232
+ :param bool is_dir: True if you want to get an item from dir type
233
+ :return: Item object
234
+ :rtype: dtlpy.entities.item.Item
235
+
236
+ **Example**:
237
+
238
+ .. code-block:: python
239
+
240
+ dataset.items.get(item_id='item_id')
241
+ """
242
+ if fetch is None:
243
+ fetch = self._client_api.fetch_entities
244
+
245
+ if fetch:
246
+ if item_id is not None:
247
+ success, response = self._client_api.gen_request(req_type="get",
248
+ path="/items/{}".format(item_id))
249
+ if success:
250
+ item = self.items_entity.from_json(client_api=self._client_api,
251
+ _json=response.json(),
252
+ dataset=self._dataset,
253
+ project=self._project)
254
+ # verify input filepath is same as the given id
255
+ if filepath is not None and item.filename != filepath:
256
+ logger.warning(
257
+ "Mismatch found in items.get: filepath is different then item.filename: "
258
+ "{!r} != {!r}".format(
259
+ filepath,
260
+ item.filename))
261
+ else:
262
+ raise exceptions.PlatformException(response)
263
+ elif filepath is not None:
264
+ filters = entities.Filters()
265
+ filters.pop(field='hidden')
266
+ if is_dir:
267
+ filters.add(field='type', values='dir')
268
+ filters.recursive = False
269
+ filters.add(field='filename', values=filepath)
270
+ paged_entity = self.list(filters=filters)
271
+ if len(paged_entity.items) == 0:
272
+ raise exceptions.PlatformException(error='404',
273
+ message='Item not found. filepath= "{}"'.format(filepath))
274
+ elif len(paged_entity.items) > 1:
275
+ raise exceptions.PlatformException(
276
+ error='404',
277
+ message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
278
+ else:
279
+ item = paged_entity.items[0]
280
+ else:
281
+ raise exceptions.PlatformException(error="400",
282
+ message='Must choose by at least one. "filename" or "item_id"')
283
+ else:
284
+ item = entities.Item.from_json(_json={'id': item_id,
285
+ 'filename': filepath},
286
+ client_api=self._client_api,
287
+ dataset=self._dataset,
288
+ is_fetched=False,
289
+ project=self._project)
290
+ assert isinstance(item, entities.Item)
291
+ return item
292
+
293
+ @_api_reference.add(path='/items/{id}/clone', method='post')
294
+ def clone(self,
295
+ item_id: str,
296
+ dst_dataset_id: str,
297
+ remote_filepath: str = None,
298
+ metadata: dict = None,
299
+ with_annotations: bool = True,
300
+ with_metadata: bool = True,
301
+ with_task_annotations_status: bool = False,
302
+ allow_many: bool = False,
303
+ wait: bool = True):
304
+ """
305
+ Clone item. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.
306
+
307
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
308
+
309
+ :param str item_id: item to clone
310
+ :param str dst_dataset_id: destination dataset id
311
+ :param str remote_filepath: complete filepath
312
+ :param dict metadata: new metadata to add
313
+ :param bool with_annotations: clone annotations
314
+ :param bool with_metadata: clone metadata
315
+ :param bool with_task_annotations_status: clone task annotations status
316
+ :param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
317
+ :param bool wait: wait for the command to finish
318
+ :return: Item object
319
+ :rtype: dtlpy.entities.item.Item
320
+
321
+ **Example**:
322
+
323
+ .. code-block:: python
324
+
325
+ dataset.items.clone(item_id='item_id',
326
+ dst_dataset_id='dist_dataset_id',
327
+ with_metadata=True,
328
+ with_task_annotations_status=False,
329
+ with_annotations=False)
330
+ """
331
+ if metadata is None:
332
+ metadata = dict()
333
+ payload = {"targetDatasetId": dst_dataset_id,
334
+ "remoteFileName": remote_filepath,
335
+ "metadata": metadata,
336
+ "cloneDatasetParams": {
337
+ "withItemsAnnotations": with_annotations,
338
+ "withMetadata": with_metadata,
339
+ "withTaskAnnotationsStatus": with_task_annotations_status},
340
+ "allowMany": allow_many
341
+ }
342
+ success, response = self._client_api.gen_request(req_type="post",
343
+ path="/items/{}/clone".format(item_id),
344
+ json_req=payload)
345
+ # check response
346
+ if not success:
347
+ raise exceptions.PlatformException(response)
348
+
349
+ command = entities.Command.from_json(_json=response.json(),
350
+ client_api=self._client_api)
351
+ if not wait:
352
+ return command
353
+ command = command.wait()
354
+
355
+ if 'returnedModelId' not in command.spec:
356
+ raise exceptions.PlatformException(error='400',
357
+ message="returnedModelId key is missing in command response: {}"
358
+ .format(response))
359
+ cloned_item = self.get(item_id=command.spec['returnedModelId'][0])
360
+ return cloned_item
361
+
362
+ @_api_reference.add(path='/items/{id}', method='delete')
363
+ def delete(self,
364
+ filename: str = None,
365
+ item_id: str = None,
366
+ filters: entities.Filters = None):
367
+ """
368
+ Delete item from platform.
369
+
370
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
371
+
372
+ You must provide at least ONE of the following params: item id, filename, filters.
373
+
374
+ :param str filename: optional - search item by remote path
375
+ :param str item_id: optional - search item by id
376
+ :param dtlpy.entities.filters.Filters filters: optional - delete items by filter
377
+ :return: True if success
378
+ :rtype: bool
379
+
380
+ **Example**:
381
+
382
+ .. code-block:: python
383
+
384
+ dataset.items.delete(item_id='item_id')
385
+ """
386
+ if item_id is not None:
387
+ success, response = self._client_api.gen_request(req_type="delete",
388
+ path="/items/{}".format(item_id),
389
+ )
390
+ elif filename is not None:
391
+ if not filename.startswith("/"):
392
+ filename = "/" + filename
393
+ items = self.get(filepath=filename)
394
+ if not isinstance(items, list):
395
+ items = [items]
396
+ if len(items) == 0:
397
+ raise exceptions.PlatformException("404", "Item not found")
398
+ elif len(items) > 1:
399
+ raise exceptions.PlatformException(error="404", message="More the 1 item exist by the name provided")
400
+ else:
401
+ item_id = items[0].id
402
+ success, response = self._client_api.gen_request(req_type="delete",
403
+ path="/items/{}".format(item_id))
404
+ elif filters is not None:
405
+ # prepare request
406
+ success, response = self._client_api.gen_request(req_type="POST",
407
+ path="/datasets/{}/query".format(self.dataset.id),
408
+ json_req=filters.prepare(operation='delete'))
409
+ else:
410
+ raise exceptions.PlatformException("400", "Must provide item id, filename or filters")
411
+
412
+ # check response
413
+ if success:
414
+ logger.debug("Item/s deleted successfully")
415
+ return success
416
+ else:
417
+ raise exceptions.PlatformException(response)
418
+
419
    @_api_reference.add(path='/items/{id}', method='patch')
    def update(self,
               item: entities.Item = None,
               filters: entities.Filters = None,
               update_values=None,
               system_update_values=None,
               system_metadata: bool = False):
        """
        Update item metadata.

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        You must provide at least ONE of the following params: update_values, system_update_values.

        :param dtlpy.entities.item.Item item: Item object
        :param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
        :param update_values: optional field to be updated and new values
        :param system_update_values: values in system metadata to be updated
        :param bool system_metadata: True, if you want to update the metadata system
        :return: Item object (single update) or the raw platform response dict (bulk update by filters)
        :rtype: dtlpy.entities.item.Item
        :raises dtlpy.exceptions.PlatformException: on invalid argument combinations or platform errors

        **Example**:

        .. code-block:: python

            dataset.items.update(item='item_entity')
        """
        # filters created from a task/assignment context are allowed to run a
        # bulk update without explicit update values
        ref = filters is not None and (filters._ref_task or filters._ref_assignment)

        if system_update_values and not system_metadata:
            logger.warning('system metadata will not be updated because param system_metadata is False')

        # check params
        if item is None and filters is None:
            raise exceptions.PlatformException('400', 'must provide either item or filters')

        value_to_update = update_values or system_update_values

        if item is None and not ref and not value_to_update:
            raise exceptions.PlatformException('400',
                                               'Must provide update_values or system_update_values')

        if item is not None and value_to_update:
            raise exceptions.PlatformException('400',
                                               'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
                                               'These parameters are intended only for bulk updates using filters.')

        # update item
        if item is not None:
            # send only the delta between the platform snapshot and the local entity
            json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
                                                     modified=item.to_json())
            if not json_req:
                # nothing changed locally - skip the request entirely
                return item
            url_path = "/items/{}".format(item.id)
            if system_metadata:
                url_path += "?system=true"
            success, response = self._client_api.gen_request(req_type="patch",
                                                             path=url_path,
                                                             json_req=json_req)
            if success:
                logger.debug("Item was updated successfully. Item id: {}".format(item.id))
                return self.items_entity.from_json(client_api=self._client_api,
                                                   _json=response.json(),
                                                   dataset=self._dataset)
            else:
                logger.error("Error while updating item")
                raise exceptions.PlatformException(response)
        # update by filters
        else:
            # prepare request
            prepared_filter = filters.prepare(operation='update',
                                              system_update=system_update_values,
                                              system_metadata=system_metadata,
                                              update=update_values)
            success, response = self._client_api.gen_request(req_type="POST",
                                                             path="/datasets/{}/query".format(self.dataset.id),
                                                             json_req=prepared_filter)
            if not success:
                raise exceptions.PlatformException(response)
            else:
                logger.debug("Items were updated successfully.")
                # bulk update returns the raw platform response, not Item entities
                return response.json()
+
503
+ def download(
504
+ self,
505
+ filters: entities.Filters = None,
506
+ items=None,
507
+ # download options
508
+ local_path: str = None,
509
+ file_types: list = None,
510
+ save_locally: bool = True,
511
+ to_array: bool = False,
512
+ annotation_options: entities.ViewAnnotationOptions = None,
513
+ annotation_filters: entities.Filters = None,
514
+ overwrite: bool = False,
515
+ to_items_folder: bool = True,
516
+ thickness: int = 1,
517
+ with_text: bool = False,
518
+ without_relative_path=None,
519
+ avoid_unnecessary_annotation_download: bool = False,
520
+ include_annotations_in_output: bool = True,
521
+ export_png_files: bool = False,
522
+ filter_output_annotations: bool = False,
523
+ alpha: float = 1,
524
+ export_version=entities.ExportVersion.V1,
525
+ dataset_lock: bool = False,
526
+ lock_timeout_sec: int = None,
527
+ export_summary: bool = False,
528
+ ):
529
+ """
530
+ Download dataset items by filters.
531
+
532
+ Filters the dataset for items and saves them locally.
533
+
534
+ Optional -- download annotation, mask, instance, and image mask of the item.
535
+
536
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
537
+
538
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
539
+ :param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
540
+ :param str local_path: local folder or filename to save to.
541
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
542
+ :param bool save_locally: bool. save to disk or return a buffer
543
+ :param bool to_array: returns Ndarray when True and local_path = False
544
+ :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
545
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
546
+ :param bool overwrite: optional - default = False
547
+ :param bool dataset_lock: optional - default = False
548
+ :param bool export_summary: optional - default = False
549
+ :param int lock_timeout_sec: optional
550
+ :param bool to_items_folder: Create 'items' folder and download items to it
551
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
552
+ :param bool with_text: optional - add text to annotations, default = False
553
+ :param bool without_relative_path: bool - download items without the relative path from platform
554
+ :param bool avoid_unnecessary_annotation_download: default - False
555
+ :param bool include_annotations_in_output: default - False , if export should contain annotations
556
+ :param bool export_png_files: default - if True, semantic annotations should be exported as png files
557
+ :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
558
+ :param float alpha: opacity value [0 1], default 1
559
+ :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
560
+ :return: generator of local_path per each downloaded item
561
+ :rtype: generator or single item
562
+
563
+ **Example**:
564
+
565
+ .. code-block:: python
566
+
567
+ dataset.items.download(local_path='local_path',
568
+ annotation_options=dl.ViewAnnotationOptions,
569
+ overwrite=False,
570
+ thickness=1,
571
+ with_text=False,
572
+ alpha=1,
573
+ save_locally=True
574
+ )
575
+ """
576
+ downloader = repositories.Downloader(self)
577
+ return downloader.download(
578
+ filters=filters,
579
+ items=items,
580
+ local_path=local_path,
581
+ file_types=file_types,
582
+ save_locally=save_locally,
583
+ to_array=to_array,
584
+ annotation_options=annotation_options,
585
+ annotation_filters=annotation_filters,
586
+ overwrite=overwrite,
587
+ to_items_folder=to_items_folder,
588
+ thickness=thickness,
589
+ alpha=alpha,
590
+ with_text=with_text,
591
+ without_relative_path=without_relative_path,
592
+ avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
593
+ include_annotations_in_output=include_annotations_in_output,
594
+ export_png_files=export_png_files,
595
+ filter_output_annotations=filter_output_annotations,
596
+ export_version=export_version,
597
+ dataset_lock=dataset_lock,
598
+ lock_timeout_sec=lock_timeout_sec,
599
+ export_summary=export_summary
600
+ )
601
+
602
+ def upload(
603
+ self,
604
+ # what to upload
605
+ local_path: str,
606
+ local_annotations_path: str = None,
607
+ # upload options
608
+ remote_path: str = "/",
609
+ remote_name: str = None,
610
+ file_types: list = None,
611
+ overwrite: bool = False,
612
+ item_metadata: dict = None,
613
+ output_entity=entities.Item,
614
+ no_output: bool = False,
615
+ export_version: str = entities.ExportVersion.V1,
616
+ item_description: str = None,
617
+ raise_on_error: bool = False,
618
+ return_as_list: bool = False
619
+ ):
620
+ """
621
+ Upload local file to dataset.
622
+ Local filesystem will remain unchanged.
623
+ If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.
624
+
625
+ **Prerequisites**: Any user can upload items.
626
+
627
+ :param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
628
+ :param str local_annotations_path: path to dataloop format annotations json files.
629
+ :param str remote_path: remote path to save.
630
+ :param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
631
+ :param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
632
+ :param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
633
+ :param bool overwrite: optional - default = False
634
+ :param output_entity: output type
635
+ :param bool no_output: do not return the items after upload
636
+ :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
637
+ :param str item_description: add a string description to the uploaded item
638
+ :param bool raise_on_error: raise an exception if an error occurs
639
+ :param bool return_as_list: return a list of items instead of a generator
640
+
641
+ :return: Output (generator/single item)
642
+ :rtype: generator or single item
643
+
644
+ **Example**:
645
+
646
+ .. code-block:: python
647
+
648
+ dataset.items.upload(local_path='local_path',
649
+ local_annotations_path='local_annotations_path',
650
+ overwrite=True,
651
+ item_metadata={'Hellow': 'Word'}
652
+ )
653
+ """
654
+ # initiate and use uploader
655
+ uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
656
+ return uploader.upload(
657
+ local_path=local_path,
658
+ local_annotations_path=local_annotations_path,
659
+ # upload options
660
+ remote_path=remote_path,
661
+ remote_name=remote_name,
662
+ file_types=file_types,
663
+ # config
664
+ overwrite=overwrite,
665
+ # metadata to upload with items
666
+ item_metadata=item_metadata,
667
+ export_version=export_version,
668
+ item_description=item_description,
669
+ raise_on_error=raise_on_error,
670
+ return_as_list=return_as_list
671
+ )
672
+
673
+ @property
674
+ def platform_url(self):
675
+ return self._client_api._get_resource_url(
676
+ "projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id))
677
+
678
+ def open_in_web(self, filepath=None, item_id=None, item=None):
679
+ """
680
+ Open the item in web platform
681
+
682
+ **Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.
683
+
684
+ :param str filepath: item file path
685
+ :param str item_id: item id
686
+ :param dtlpy.entities.item.Item item: item entity
687
+
688
+ **Example**:
689
+
690
+ .. code-block:: python
691
+
692
+ dataset.items.open_in_web(item_id='item_id')
693
+
694
+ """
695
+ if filepath is not None:
696
+ item = self.get(filepath=filepath)
697
+ if item is not None:
698
+ item.open_in_web()
699
+ elif item_id is not None:
700
+ self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
701
+ else:
702
+ self._client_api._open_in_web(url=self.platform_url)
703
+
704
+ def update_status(self,
705
+ status: entities.ItemStatus,
706
+ items=None,
707
+ item_ids=None,
708
+ filters=None,
709
+ dataset=None,
710
+ clear=False):
711
+ """
712
+ Update item status in task
713
+
714
+ **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.
715
+
716
+ You must provide at least ONE of the following params: items, item_ids, filters.
717
+
718
+ :param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
719
+ :param list items: list of items
720
+ :param list item_ids: list of items id
721
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
722
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
723
+ :param bool clear: to delete status
724
+
725
+ **Example**:
726
+
727
+ .. code-block:: python
728
+
729
+ dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)
730
+
731
+ """
732
+ if items is None and item_ids is None and filters is None:
733
+ raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')
734
+
735
+ if self._dataset is None and dataset is None:
736
+ raise exceptions.PlatformException('400', 'Please provide dataset')
737
+ elif dataset is None:
738
+ dataset = self._dataset
739
+
740
+ if filters is not None:
741
+ items = dataset.items.list(filters=filters)
742
+ item_count = items.items_count
743
+ elif items is not None:
744
+ if isinstance(items, entities.PagedEntities):
745
+ item_count = items.items_count
746
+ else:
747
+ if not isinstance(items, list):
748
+ items = [items]
749
+ item_count = len(items)
750
+ items = [items]
751
+ else:
752
+ if not isinstance(item_ids, list):
753
+ item_ids = [item_ids]
754
+ item_count = len(item_ids)
755
+ items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]
756
+
757
+ pool = self._client_api.thread_pools(pool_name='item.status_update')
758
+ jobs = [None for _ in range(item_count)]
759
+ # call multiprocess wrapper to run service on each item in list
760
+ for page in items:
761
+ for i_item, item in enumerate(page):
762
+ jobs[i_item] = pool.submit(item.update_status,
763
+ **{'status': status,
764
+ 'clear': clear})
765
+
766
+ # get all results
767
+ results = [j.result() for j in jobs]
768
+ out_success = [r for r in results if r is True]
769
+ out_errors = [r for r in results if r is False]
770
+ if len(out_errors) == 0:
771
+ logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
772
+ else:
773
+ logger.error(out_errors)
774
+ logger.error('Item/s updated with {} errors'.format(len(out_errors)))
775
+
776
+ def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
777
+ """
778
+ Create a directory in a dataset.
779
+
780
+ **Prerequisites**: All users.
781
+
782
+ :param str directory: name of directory
783
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
784
+ :return: Item object
785
+ :rtype: dtlpy.entities.item.Item
786
+
787
+ **Example**:
788
+
789
+ .. code-block:: python
790
+
791
+ dataset.items.make_dir(directory='directory_name')
792
+ """
793
+ if self._dataset_id is None and dataset is None:
794
+ raise exceptions.PlatformException('400', 'Please provide parameter dataset')
795
+
796
+ payload = {
797
+ 'type': 'dir',
798
+ 'path': directory
799
+ }
800
+ headers = {'content-type': 'application/x-www-form-urlencoded'}
801
+ success, response = self._client_api.gen_request(req_type="post",
802
+ headers=headers,
803
+ path="/datasets/{}/items".format(self._dataset_id),
804
+ data=payload)
805
+ if success:
806
+ item = self.items_entity.from_json(client_api=self._client_api,
807
+ _json=response.json(),
808
+ dataset=self._dataset)
809
+ else:
810
+ raise exceptions.PlatformException(response)
811
+
812
+ return item
813
+
814
+ def move_items(self,
815
+ destination: str,
816
+ filters: entities.Filters = None,
817
+ items=None,
818
+ dataset: entities.Dataset = None
819
+ ) -> bool:
820
+ """
821
+ Move items to another directory.
822
+ If directory does not exist we will create it
823
+
824
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
825
+
826
+ :param str destination: destination directory
827
+ :param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
828
+ :param items: optional - either this or filters. A list of items to move
829
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
830
+ :return: True if success
831
+ :rtype: bool
832
+
833
+ **Example**:
834
+
835
+ .. code-block:: python
836
+
837
+ dataset.items.move_items(destination='directory_name')
838
+ """
839
+ if filters is None and items is None:
840
+ raise exceptions.PlatformException('400', 'Must provide either filters or items')
841
+
842
+ dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
843
+ dest_dir_filter.recursive = False
844
+ dest_dir_filter.add(field='filename', values=destination)
845
+ dirs_page = self.list(filters=dest_dir_filter)
846
+
847
+ if dirs_page.items_count == 0:
848
+ directory = self.make_dir(directory=destination, dataset=dataset)
849
+ elif dirs_page.items_count == 1:
850
+ directory = dirs_page.items[0]
851
+ else:
852
+ raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))
853
+
854
+ if filters is not None:
855
+ items = self.list(filters=filters)
856
+ elif isinstance(items, list):
857
+ items = [items]
858
+ elif not isinstance(items, entities.PagedEntities):
859
+ raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
860
+ type(items)))
861
+
862
+ item_ids = list()
863
+ for page in items:
864
+ for item in page:
865
+ item_ids.append(item.id)
866
+
867
+ success, response = self._client_api.gen_request(req_type="put",
868
+ path="/datasets/{}/items/{}".format(self._dataset_id,
869
+ directory.id),
870
+ json_req=item_ids)
871
+ if not success:
872
+ raise exceptions.PlatformException(response)
873
+
874
+ return success
875
+
876
+ def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
877
+ """
878
+ Get item score
879
+
880
+ **Prerequisites**: You must be able to read the task
881
+
882
+ :param str item_id: item id
883
+ :param str task_id: task id
884
+ :param int page_offset: start page
885
+ :param int page_size: page size
886
+ :return: page of item scores
887
+
888
+ **Example**:
889
+
890
+ .. code-block:: python
891
+
892
+ dataset.items.item_score(item_id='item_id', task_id='task_id')
893
+
894
+ """
895
+
896
+ if item_id is None:
897
+ raise exceptions.PlatformException('400', 'Must provide item id')
898
+
899
+ if task_id is None:
900
+ raise exceptions.PlatformException('400', 'Must provide task id')
901
+
902
+ success, response = self._client_api.gen_request(req_type="get",
903
+ path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
904
+ .format(task_id, item_id, page_offset, page_size))
905
+ if success:
906
+ return response.json()
907
+ else:
908
+ raise exceptions.PlatformException(response)
909
+