dtlpy 1.114.17__py3-none-any.whl → 1.116.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238) hide show
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -311
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -296
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -442
  76. dtlpy/entities/dataset.py +1299 -1285
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -223
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -645
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -953
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -499
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -958
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1086
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -158
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -435
  166. dtlpy/repositories/datasets.py +1504 -1291
  167. dtlpy/repositories/downloader.py +976 -903
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -470
  170. dtlpy/repositories/executions.py +815 -817
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -238
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -909
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -988
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -651
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1782
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -183
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.114.17.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.114.17.dist-info/RECORD +0 -240
  237. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
@@ -1,909 +1,912 @@
1
- import logging
2
-
3
- from .. import entities, exceptions, repositories, miscellaneous, _api_reference
4
- from ..services.api_client import ApiClient
5
-
6
- logger = logging.getLogger(name='dtlpy')
7
-
8
-
9
- class Items:
10
- """
11
- Items Repository
12
-
13
- The Items class allows you to manage items in your datasets.
14
- For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
15
- """
16
-
17
- def __init__(self,
18
- client_api: ApiClient,
19
- datasets: repositories.Datasets = None,
20
- dataset: entities.Dataset = None,
21
- dataset_id=None,
22
- items_entity=None,
23
- project=None):
24
- self._client_api = client_api
25
- self._dataset = dataset
26
- self._dataset_id = dataset_id
27
- self._datasets = datasets
28
- self._project = project
29
- # set items entity to represent the item (Item, Codebase, Artifact etc...)
30
- if items_entity is None:
31
- self.items_entity = entities.Item
32
- if self._dataset_id is None and self._dataset is not None:
33
- self._dataset_id = self._dataset.id
34
-
35
- ############
36
- # entities #
37
- ############
38
- @property
39
- def dataset(self) -> entities.Dataset:
40
- if self._dataset is None:
41
- if self._dataset_id is None:
42
- raise exceptions.PlatformException(
43
- error='400',
44
- message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
45
- self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
46
- assert isinstance(self._dataset, entities.Dataset)
47
- return self._dataset
48
-
49
- @dataset.setter
50
- def dataset(self, dataset: entities.Dataset):
51
- if not isinstance(dataset, entities.Dataset):
52
- raise ValueError('Must input a valid Dataset entity')
53
- self._dataset = dataset
54
-
55
- @property
56
- def project(self) -> entities.Project:
57
- if self._project is None:
58
- raise exceptions.PlatformException(
59
- error='400',
60
- message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
61
- assert isinstance(self._dataset, entities.Dataset)
62
- return self._project
63
-
64
- @project.setter
65
- def project(self, project: entities.Project):
66
- if not isinstance(project, entities.Project):
67
- raise ValueError('Must input a valid Dataset entity')
68
- self._project = project
69
-
70
- ################
71
- # repositories #
72
- ################
73
- @property
74
- def datasets(self) -> repositories.Datasets:
75
- if self._datasets is None:
76
- self._datasets = repositories.Datasets(client_api=self._client_api)
77
- assert isinstance(self._datasets, repositories.Datasets)
78
- return self._datasets
79
-
80
- ###########
81
- # methods #
82
- ###########
83
-
84
- def set_items_entity(self, entity):
85
- """
86
- Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
87
-
88
- :param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
89
- """
90
- if entity in [entities.Item, entities.Artifact, entities.Codebase]:
91
- self.items_entity = entity
92
- else:
93
- raise exceptions.PlatformException(error="403",
94
- message="Unable to set given entity. Entity give: {}".format(entity))
95
-
96
- def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
97
- """
98
- Get all items in dataset.
99
-
100
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
101
-
102
- :param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
103
- :return: list of all items
104
- :rtype: list
105
-
106
- **Example**:
107
-
108
- .. code-block:: python
109
-
110
- dataset.items.get_all_items()
111
-
112
- """
113
- if filters is None:
114
- filters = entities.Filters()
115
- filters._user_query = 'false'
116
- filters.add(field='type', values='file')
117
- pages = self.list(filters=filters)
118
- num_items = pages.items_count
119
- items = [None for _ in range(num_items)]
120
- for i_item, item in enumerate(pages.all()):
121
- items[i_item] = item
122
- items = [item for item in items if item is not None]
123
- return items
124
-
125
- def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
126
- pool = self._client_api.thread_pools(pool_name='entity.create')
127
- jobs = [None for _ in range(len(response_items))]
128
- # return triggers list
129
- for i_item, item in enumerate(response_items):
130
- jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
131
- **{'client_api': self._client_api,
132
- '_json': item,
133
- 'dataset': self.dataset})
134
- # get all results
135
- results = [j.result() for j in jobs]
136
- # log errors
137
- _ = [logger.warning(r[1]) for r in results if r[0] is False]
138
- # return good jobs
139
- items = miscellaneous.List([r[1] for r in results if r[0] is True])
140
- return items
141
-
142
- def _list(self, filters: entities.Filters):
143
- """
144
- Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
145
- user is expected to perform another request then for every folder item to actually get the its item list.
146
-
147
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
148
- :return: json response
149
- """
150
- # prepare request
151
- success, response = self._client_api.gen_request(req_type="POST",
152
- path="/datasets/{}/query".format(self.dataset.id),
153
- json_req=filters.prepare(),
154
- headers={'user_query': filters._user_query})
155
- if not success:
156
- raise exceptions.PlatformException(response)
157
- return response.json()
158
-
159
- @_api_reference.add(path='/datasets/{id}/query', method='post')
160
- def list(self,
161
- filters: entities.Filters = None,
162
- page_offset: int = None,
163
- page_size: int = None
164
- ) -> entities.PagedEntities:
165
- """
166
- List items in a dataset.
167
-
168
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
169
-
170
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
171
- :param int page_offset: start page
172
- :param int page_size: page size
173
- :return: Pages object
174
- :rtype: dtlpy.entities.paged_entities.PagedEntities
175
-
176
- **Example**:
177
-
178
- .. code-block:: python
179
-
180
- dataset.items.list(page_offset=0, page_size=100)
181
- """
182
- # default filters
183
- if filters is None:
184
- filters = entities.Filters()
185
- filters._user_query = 'false'
186
- # assert type filters
187
- elif not isinstance(filters, entities.Filters):
188
- raise exceptions.PlatformException(error='400',
189
- message='Unknown filters type: {!r}'.format(type(filters)))
190
- if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
191
- raise exceptions.PlatformException(
192
- error='400',
193
- message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
194
-
195
- # page size
196
- if page_size is not None:
197
- filters.page_size = page_size
198
-
199
- # page offset
200
- if page_offset is not None:
201
- filters.page = page_offset
202
-
203
- if filters.resource == entities.FiltersResource.ITEM:
204
- items_repository = self
205
- else:
206
- items_repository = repositories.Annotations(client_api=self._client_api,
207
- dataset=self._dataset)
208
-
209
- paged = entities.PagedEntities(items_repository=items_repository,
210
- filters=filters,
211
- page_offset=filters.page,
212
- page_size=filters.page_size,
213
- client_api=self._client_api)
214
- paged.get_page()
215
- return paged
216
-
217
    @_api_reference.add(path='/items/{id}', method='get')
    def get(self,
            filepath: str = None,
            item_id: str = None,
            fetch: bool = None,
            is_dir: bool = False
            ) -> entities.Item:
        """
        Get Item object

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        :param str filepath: optional - search by remote path
        :param str item_id: optional - search by id
        :param bool fetch: optional - fetch entity from platform, default taken from cookie
        :param bool is_dir: True if you want to get an item from dir type
        :return: Item object
        :rtype: dtlpy.entities.item.Item

        **Example**:

        .. code-block:: python

            dataset.items.get(item_id='item_id')
        """
        # default fetch behavior comes from the client configuration
        if fetch is None:
            fetch = self._client_api.fetch_entities

        if fetch:
            # prefer lookup by id when both id and filepath are given
            if item_id is not None:
                success, response = self._client_api.gen_request(req_type="get",
                                                                 path="/items/{}".format(item_id))
                if success:
                    item = self.items_entity.from_json(client_api=self._client_api,
                                                       _json=response.json(),
                                                       dataset=self._dataset,
                                                       project=self._project)
                    # verify input filepath is same as the given id
                    if filepath is not None and item.filename != filepath:
                        logger.warning(
                            "Mismatch found in items.get: filepath is different then item.filename: "
                            "{!r} != {!r}".format(
                                filepath,
                                item.filename))
                else:
                    raise exceptions.PlatformException(response)
            elif filepath is not None:
                # resolve by remote path: query the dataset and expect exactly one match
                filters = entities.Filters()
                filters.pop(field='hidden')
                if is_dir:
                    filters.add(field='type', values='dir')
                    filters.recursive = False
                filters.add(field='filename', values=filepath)
                paged_entity = self.list(filters=filters)
                if len(paged_entity.items) == 0:
                    raise exceptions.PlatformException(error='404',
                                                       message='Item not found. filepath= "{}"'.format(filepath))
                elif len(paged_entity.items) > 1:
                    raise exceptions.PlatformException(
                        error='404',
                        message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
                else:
                    item = paged_entity.items[0]
            else:
                raise exceptions.PlatformException(error="400",
                                                   message='Must choose by at least one. "filename" or "item_id"')
        else:
            # offline mode: build a lightweight placeholder entity without calling the platform
            item = entities.Item.from_json(_json={'id': item_id,
                                                  'filename': filepath},
                                           client_api=self._client_api,
                                           dataset=self._dataset,
                                           is_fetched=False,
                                           project=self._project)
        assert isinstance(item, entities.Item)
        return item
292
-
293
    @_api_reference.add(path='/items/{id}/clone', method='post')
    def clone(self,
              item_id: str,
              dst_dataset_id: str,
              remote_filepath: str = None,
              metadata: dict = None,
              with_annotations: bool = True,
              with_metadata: bool = True,
              with_task_annotations_status: bool = False,
              allow_many: bool = False,
              wait: bool = True):
        """
        Clone item. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        :param str item_id: item to clone
        :param str dst_dataset_id: destination dataset id
        :param str remote_filepath: complete filepath
        :param dict metadata: new metadata to add
        :param bool with_annotations: clone annotations
        :param bool with_metadata: clone metadata
        :param bool with_task_annotations_status: clone task annotations status
        :param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
        :param bool wait: wait for the command to finish
        :return: Item object
        :rtype: dtlpy.entities.item.Item

        **Example**:

        .. code-block:: python

            dataset.items.clone(item_id='item_id',
                        dst_dataset_id='dist_dataset_id',
                        with_metadata=True,
                        with_task_annotations_status=False,
                        with_annotations=False)
        """
        if metadata is None:
            metadata = dict()
        payload = {"targetDatasetId": dst_dataset_id,
                   "remoteFileName": remote_filepath,
                   "metadata": metadata,
                   "cloneDatasetParams": {
                       "withItemsAnnotations": with_annotations,
                       "withMetadata": with_metadata,
                       "withTaskAnnotationsStatus": with_task_annotations_status},
                   "allowMany": allow_many
                   }
        success, response = self._client_api.gen_request(req_type="post",
                                                         path="/items/{}/clone".format(item_id),
                                                         json_req=payload)
        # check response
        if not success:
            raise exceptions.PlatformException(response)

        # cloning runs asynchronously on the platform; the response describes a Command
        command = entities.Command.from_json(_json=response.json(),
                                             client_api=self._client_api)
        if not wait:
            # caller opted out of blocking: return the Command for manual tracking
            return command
        command = command.wait()

        # the finished command carries the new item id under 'returnedModelId'
        if 'returnedModelId' not in command.spec:
            raise exceptions.PlatformException(error='400',
                                               message="returnedModelId key is missing in command response: {}"
                                               .format(response))
        cloned_item = self.get(item_id=command.spec['returnedModelId'][0])
        return cloned_item
361
-
362
- @_api_reference.add(path='/items/{id}', method='delete')
363
- def delete(self,
364
- filename: str = None,
365
- item_id: str = None,
366
- filters: entities.Filters = None):
367
- """
368
- Delete item from platform.
369
-
370
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
371
-
372
- You must provide at least ONE of the following params: item id, filename, filters.
373
-
374
- :param str filename: optional - search item by remote path
375
- :param str item_id: optional - search item by id
376
- :param dtlpy.entities.filters.Filters filters: optional - delete items by filter
377
- :return: True if success
378
- :rtype: bool
379
-
380
- **Example**:
381
-
382
- .. code-block:: python
383
-
384
- dataset.items.delete(item_id='item_id')
385
- """
386
- if item_id is not None:
387
- success, response = self._client_api.gen_request(req_type="delete",
388
- path="/items/{}".format(item_id),
389
- )
390
- elif filename is not None:
391
- if not filename.startswith("/"):
392
- filename = "/" + filename
393
- items = self.get(filepath=filename)
394
- if not isinstance(items, list):
395
- items = [items]
396
- if len(items) == 0:
397
- raise exceptions.PlatformException("404", "Item not found")
398
- elif len(items) > 1:
399
- raise exceptions.PlatformException(error="404", message="More the 1 item exist by the name provided")
400
- else:
401
- item_id = items[0].id
402
- success, response = self._client_api.gen_request(req_type="delete",
403
- path="/items/{}".format(item_id))
404
- elif filters is not None:
405
- # prepare request
406
- success, response = self._client_api.gen_request(req_type="POST",
407
- path="/datasets/{}/query".format(self.dataset.id),
408
- json_req=filters.prepare(operation='delete'))
409
- else:
410
- raise exceptions.PlatformException("400", "Must provide item id, filename or filters")
411
-
412
- # check response
413
- if success:
414
- logger.debug("Item/s deleted successfully")
415
- return success
416
- else:
417
- raise exceptions.PlatformException(response)
418
-
419
    @_api_reference.add(path='/items/{id}', method='patch')
    def update(self,
               item: entities.Item = None,
               filters: entities.Filters = None,
               update_values=None,
               system_update_values=None,
               system_metadata: bool = False):
        """
        Update item metadata.

        **Prerequisites**: You must be in the role of an *owner* or *developer*.

        You must provide at least ONE of the following params: update_values, system_update_values.

        :param dtlpy.entities.item.Item item: Item object
        :param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
        :param update_values: optional field to be updated and new values
        :param system_update_values: values in system metadata to be updated
        :param bool system_metadata: True, if you want to update the metadata system
        :return: Item object
        :rtype: dtlpy.entities.item.Item

        **Example**:

        .. code-block:: python

            dataset.items.update(item='item_entity')
        """
        # filters that reference a task/assignment are allowed without explicit update values
        ref = filters is not None and (filters._ref_task or filters._ref_assignment)

        if system_update_values and not system_metadata:
            logger.warning('system metadata will not be updated because param system_metadata is False')

        # check params
        if item is None and filters is None:
            raise exceptions.PlatformException('400', 'must provide either item or filters')

        value_to_update = update_values or system_update_values

        if item is None and not ref and not value_to_update:
            raise exceptions.PlatformException('400',
                                               'Must provide update_values or system_update_values')

        # bulk-update values cannot be combined with a single-item update
        if item is not None and value_to_update:
            raise exceptions.PlatformException('400',
                                               'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
                                               'These parameters are intended only for bulk updates using filters.')

        # update item
        if item is not None:
            # send only the diff between the local entity and the platform copy
            json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
                                                     modified=item.to_json())
            if not json_req:
                # nothing changed locally - skip the request entirely
                return item
            url_path = "/items/{}".format(item.id)
            if system_metadata:
                url_path += "?system=true"
            success, response = self._client_api.gen_request(req_type="patch",
                                                             path=url_path,
                                                             json_req=json_req)
            if success:
                logger.debug("Item was updated successfully. Item id: {}".format(item.id))
                return self.items_entity.from_json(client_api=self._client_api,
                                                   _json=response.json(),
                                                   dataset=self._dataset)
            else:
                logger.error("Error while updating item")
                raise exceptions.PlatformException(response)
        # update by filters
        else:
            # prepare request
            prepared_filter = filters.prepare(operation='update',
                                              system_update=system_update_values,
                                              system_metadata=system_metadata,
                                              update=update_values)
            success, response = self._client_api.gen_request(req_type="POST",
                                                             path="/datasets/{}/query".format(self.dataset.id),
                                                             json_req=prepared_filter)
            if not success:
                raise exceptions.PlatformException(response)
            else:
                logger.debug("Items were updated successfully.")
                return response.json()
502
-
503
- def download(
504
- self,
505
- filters: entities.Filters = None,
506
- items=None,
507
- # download options
508
- local_path: str = None,
509
- file_types: list = None,
510
- save_locally: bool = True,
511
- to_array: bool = False,
512
- annotation_options: entities.ViewAnnotationOptions = None,
513
- annotation_filters: entities.Filters = None,
514
- overwrite: bool = False,
515
- to_items_folder: bool = True,
516
- thickness: int = 1,
517
- with_text: bool = False,
518
- without_relative_path=None,
519
- avoid_unnecessary_annotation_download: bool = False,
520
- include_annotations_in_output: bool = True,
521
- export_png_files: bool = False,
522
- filter_output_annotations: bool = False,
523
- alpha: float = 1,
524
- export_version=entities.ExportVersion.V1,
525
- dataset_lock: bool = False,
526
- lock_timeout_sec: int = None,
527
- export_summary: bool = False,
528
- ):
529
- """
530
- Download dataset items by filters.
531
-
532
- Filters the dataset for items and saves them locally.
533
-
534
- Optional -- download annotation, mask, instance, and image mask of the item.
535
-
536
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
537
-
538
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
539
- :param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
540
- :param str local_path: local folder or filename to save to.
541
- :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
542
- :param bool save_locally: bool. save to disk or return a buffer
543
- :param bool to_array: returns Ndarray when True and local_path = False
544
- :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
545
- :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
546
- :param bool overwrite: optional - default = False
547
- :param bool dataset_lock: optional - default = False
548
- :param bool export_summary: optional - default = False
549
- :param int lock_timeout_sec: optional
550
- :param bool to_items_folder: Create 'items' folder and download items to it
551
- :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
552
- :param bool with_text: optional - add text to annotations, default = False
553
- :param bool without_relative_path: bool - download items without the relative path from platform
554
- :param bool avoid_unnecessary_annotation_download: default - False
555
- :param bool include_annotations_in_output: default - False , if export should contain annotations
556
- :param bool export_png_files: default - if True, semantic annotations should be exported as png files
557
- :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
558
- :param float alpha: opacity value [0 1], default 1
559
- :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
560
- :return: generator of local_path per each downloaded item
561
- :rtype: generator or single item
562
-
563
- **Example**:
564
-
565
- .. code-block:: python
566
-
567
- dataset.items.download(local_path='local_path',
568
- annotation_options=dl.ViewAnnotationOptions,
569
- overwrite=False,
570
- thickness=1,
571
- with_text=False,
572
- alpha=1,
573
- save_locally=True
574
- )
575
- """
576
- downloader = repositories.Downloader(self)
577
- return downloader.download(
578
- filters=filters,
579
- items=items,
580
- local_path=local_path,
581
- file_types=file_types,
582
- save_locally=save_locally,
583
- to_array=to_array,
584
- annotation_options=annotation_options,
585
- annotation_filters=annotation_filters,
586
- overwrite=overwrite,
587
- to_items_folder=to_items_folder,
588
- thickness=thickness,
589
- alpha=alpha,
590
- with_text=with_text,
591
- without_relative_path=without_relative_path,
592
- avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
593
- include_annotations_in_output=include_annotations_in_output,
594
- export_png_files=export_png_files,
595
- filter_output_annotations=filter_output_annotations,
596
- export_version=export_version,
597
- dataset_lock=dataset_lock,
598
- lock_timeout_sec=lock_timeout_sec,
599
- export_summary=export_summary
600
- )
601
-
602
- def upload(
603
- self,
604
- # what to upload
605
- local_path: str,
606
- local_annotations_path: str = None,
607
- # upload options
608
- remote_path: str = "/",
609
- remote_name: str = None,
610
- file_types: list = None,
611
- overwrite: bool = False,
612
- item_metadata: dict = None,
613
- output_entity=entities.Item,
614
- no_output: bool = False,
615
- export_version: str = entities.ExportVersion.V1,
616
- item_description: str = None,
617
- raise_on_error: bool = False,
618
- return_as_list: bool = False
619
- ):
620
- """
621
- Upload local file to dataset.
622
- Local filesystem will remain unchanged.
623
- If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.
624
-
625
- **Prerequisites**: Any user can upload items.
626
-
627
- :param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
628
- :param str local_annotations_path: path to dataloop format annotations json files.
629
- :param str remote_path: remote path to save.
630
- :param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
631
- :param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
632
- :param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
633
- :param bool overwrite: optional - default = False
634
- :param output_entity: output type
635
- :param bool no_output: do not return the items after upload
636
- :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
637
- :param str item_description: add a string description to the uploaded item
638
- :param bool raise_on_error: raise an exception if an error occurs
639
- :param bool return_as_list: return a list of items instead of a generator
640
-
641
- :return: Output (generator/single item)
642
- :rtype: generator or single item
643
-
644
- **Example**:
645
-
646
- .. code-block:: python
647
-
648
- dataset.items.upload(local_path='local_path',
649
- local_annotations_path='local_annotations_path',
650
- overwrite=True,
651
- item_metadata={'Hellow': 'Word'}
652
- )
653
- """
654
- # initiate and use uploader
655
- uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
656
- return uploader.upload(
657
- local_path=local_path,
658
- local_annotations_path=local_annotations_path,
659
- # upload options
660
- remote_path=remote_path,
661
- remote_name=remote_name,
662
- file_types=file_types,
663
- # config
664
- overwrite=overwrite,
665
- # metadata to upload with items
666
- item_metadata=item_metadata,
667
- export_version=export_version,
668
- item_description=item_description,
669
- raise_on_error=raise_on_error,
670
- return_as_list=return_as_list
671
- )
672
-
673
- @property
674
- def platform_url(self):
675
- return self._client_api._get_resource_url(
676
- "projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id))
677
-
678
- def open_in_web(self, filepath=None, item_id=None, item=None):
679
- """
680
- Open the item in web platform
681
-
682
- **Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.
683
-
684
- :param str filepath: item file path
685
- :param str item_id: item id
686
- :param dtlpy.entities.item.Item item: item entity
687
-
688
- **Example**:
689
-
690
- .. code-block:: python
691
-
692
- dataset.items.open_in_web(item_id='item_id')
693
-
694
- """
695
- if filepath is not None:
696
- item = self.get(filepath=filepath)
697
- if item is not None:
698
- item.open_in_web()
699
- elif item_id is not None:
700
- self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
701
- else:
702
- self._client_api._open_in_web(url=self.platform_url)
703
-
704
- def update_status(self,
705
- status: entities.ItemStatus,
706
- items=None,
707
- item_ids=None,
708
- filters=None,
709
- dataset=None,
710
- clear=False):
711
- """
712
- Update item status in task
713
-
714
- **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.
715
-
716
- You must provide at least ONE of the following params: items, item_ids, filters.
717
-
718
- :param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
719
- :param list items: list of items
720
- :param list item_ids: list of items id
721
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
722
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
723
- :param bool clear: to delete status
724
-
725
- **Example**:
726
-
727
- .. code-block:: python
728
-
729
- dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)
730
-
731
- """
732
- if items is None and item_ids is None and filters is None:
733
- raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')
734
-
735
- if self._dataset is None and dataset is None:
736
- raise exceptions.PlatformException('400', 'Please provide dataset')
737
- elif dataset is None:
738
- dataset = self._dataset
739
-
740
- if filters is not None:
741
- items = dataset.items.list(filters=filters)
742
- item_count = items.items_count
743
- elif items is not None:
744
- if isinstance(items, entities.PagedEntities):
745
- item_count = items.items_count
746
- else:
747
- if not isinstance(items, list):
748
- items = [items]
749
- item_count = len(items)
750
- items = [items]
751
- else:
752
- if not isinstance(item_ids, list):
753
- item_ids = [item_ids]
754
- item_count = len(item_ids)
755
- items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]
756
-
757
- pool = self._client_api.thread_pools(pool_name='item.status_update')
758
- jobs = [None for _ in range(item_count)]
759
- # call multiprocess wrapper to run service on each item in list
760
- for page in items:
761
- for i_item, item in enumerate(page):
762
- jobs[i_item] = pool.submit(item.update_status,
763
- **{'status': status,
764
- 'clear': clear})
765
-
766
- # get all results
767
- results = [j.result() for j in jobs]
768
- out_success = [r for r in results if r is True]
769
- out_errors = [r for r in results if r is False]
770
- if len(out_errors) == 0:
771
- logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
772
- else:
773
- logger.error(out_errors)
774
- logger.error('Item/s updated with {} errors'.format(len(out_errors)))
775
-
776
- def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
777
- """
778
- Create a directory in a dataset.
779
-
780
- **Prerequisites**: All users.
781
-
782
- :param str directory: name of directory
783
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
784
- :return: Item object
785
- :rtype: dtlpy.entities.item.Item
786
-
787
- **Example**:
788
-
789
- .. code-block:: python
790
-
791
- dataset.items.make_dir(directory='directory_name')
792
- """
793
- if self._dataset_id is None and dataset is None:
794
- raise exceptions.PlatformException('400', 'Please provide parameter dataset')
795
-
796
- payload = {
797
- 'type': 'dir',
798
- 'path': directory
799
- }
800
- headers = {'content-type': 'application/x-www-form-urlencoded'}
801
- success, response = self._client_api.gen_request(req_type="post",
802
- headers=headers,
803
- path="/datasets/{}/items".format(self._dataset_id),
804
- data=payload)
805
- if success:
806
- item = self.items_entity.from_json(client_api=self._client_api,
807
- _json=response.json(),
808
- dataset=self._dataset)
809
- else:
810
- raise exceptions.PlatformException(response)
811
-
812
- return item
813
-
814
- def move_items(self,
815
- destination: str,
816
- filters: entities.Filters = None,
817
- items=None,
818
- dataset: entities.Dataset = None
819
- ) -> bool:
820
- """
821
- Move items to another directory.
822
- If directory does not exist we will create it
823
-
824
- **Prerequisites**: You must be in the role of an *owner* or *developer*.
825
-
826
- :param str destination: destination directory
827
- :param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
828
- :param items: optional - either this or filters. A list of items to move
829
- :param dtlpy.entities.dataset.Dataset dataset: dataset object
830
- :return: True if success
831
- :rtype: bool
832
-
833
- **Example**:
834
-
835
- .. code-block:: python
836
-
837
- dataset.items.move_items(destination='directory_name')
838
- """
839
- if filters is None and items is None:
840
- raise exceptions.PlatformException('400', 'Must provide either filters or items')
841
-
842
- dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
843
- dest_dir_filter.recursive = False
844
- dest_dir_filter.add(field='filename', values=destination)
845
- dirs_page = self.list(filters=dest_dir_filter)
846
-
847
- if dirs_page.items_count == 0:
848
- directory = self.make_dir(directory=destination, dataset=dataset)
849
- elif dirs_page.items_count == 1:
850
- directory = dirs_page.items[0]
851
- else:
852
- raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))
853
-
854
- if filters is not None:
855
- items = self.list(filters=filters)
856
- elif isinstance(items, list):
857
- items = [items]
858
- elif not isinstance(items, entities.PagedEntities):
859
- raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
860
- type(items)))
861
-
862
- item_ids = list()
863
- for page in items:
864
- for item in page:
865
- item_ids.append(item.id)
866
-
867
- success, response = self._client_api.gen_request(req_type="put",
868
- path="/datasets/{}/items/{}".format(self._dataset_id,
869
- directory.id),
870
- json_req=item_ids)
871
- if not success:
872
- raise exceptions.PlatformException(response)
873
-
874
- return success
875
-
876
- def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
877
- """
878
- Get item score
879
-
880
- **Prerequisites**: You must be able to read the task
881
-
882
- :param str item_id: item id
883
- :param str task_id: task id
884
- :param int page_offset: start page
885
- :param int page_size: page size
886
- :return: page of item scores
887
-
888
- **Example**:
889
-
890
- .. code-block:: python
891
-
892
- dataset.items.item_score(item_id='item_id', task_id='task_id')
893
-
894
- """
895
-
896
- if item_id is None:
897
- raise exceptions.PlatformException('400', 'Must provide item id')
898
-
899
- if task_id is None:
900
- raise exceptions.PlatformException('400', 'Must provide task id')
901
-
902
- success, response = self._client_api.gen_request(req_type="get",
903
- path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
904
- .format(task_id, item_id, page_offset, page_size))
905
- if success:
906
- return response.json()
907
- else:
908
- raise exceptions.PlatformException(response)
909
-
1
+ import logging
2
+
3
+ from .. import entities, exceptions, repositories, miscellaneous, _api_reference
4
+ from ..services.api_client import ApiClient
5
+
6
+ logger = logging.getLogger(name='dtlpy')
7
+
8
+
9
+ class Items:
10
+ """
11
+ Items Repository
12
+
13
+ The Items class allows you to manage items in your datasets.
14
+ For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
15
+ """
16
+
17
+ def __init__(self,
18
+ client_api: ApiClient,
19
+ datasets: repositories.Datasets = None,
20
+ dataset: entities.Dataset = None,
21
+ dataset_id=None,
22
+ items_entity=None,
23
+ project=None):
24
+ self._client_api = client_api
25
+ self._dataset = dataset
26
+ self._dataset_id = dataset_id
27
+ self._datasets = datasets
28
+ self._project = project
29
+ # set items entity to represent the item (Item, Codebase, Artifact etc...)
30
+ if items_entity is None:
31
+ self.items_entity = entities.Item
32
+ if self._dataset_id is None and self._dataset is not None:
33
+ self._dataset_id = self._dataset.id
34
+
35
+ ############
36
+ # entities #
37
+ ############
38
+ @property
39
+ def dataset(self) -> entities.Dataset:
40
+ if self._dataset is None:
41
+ if self._dataset_id is None:
42
+ raise exceptions.PlatformException(
43
+ error='400',
44
+ message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
45
+ self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
46
+ assert isinstance(self._dataset, entities.Dataset)
47
+ return self._dataset
48
+
49
+ @dataset.setter
50
+ def dataset(self, dataset: entities.Dataset):
51
+ if not isinstance(dataset, entities.Dataset):
52
+ raise ValueError('Must input a valid Dataset entity')
53
+ self._dataset = dataset
54
+
55
+ @property
56
+ def project(self) -> entities.Project:
57
+ if self._project is None:
58
+ raise exceptions.PlatformException(
59
+ error='400',
60
+ message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
61
+ assert isinstance(self._dataset, entities.Dataset)
62
+ return self._project
63
+
64
+ @project.setter
65
+ def project(self, project: entities.Project):
66
+ if not isinstance(project, entities.Project):
67
+ raise ValueError('Must input a valid Dataset entity')
68
+ self._project = project
69
+
70
+ ################
71
+ # repositories #
72
+ ################
73
+ @property
74
+ def datasets(self) -> repositories.Datasets:
75
+ if self._datasets is None:
76
+ self._datasets = repositories.Datasets(client_api=self._client_api)
77
+ assert isinstance(self._datasets, repositories.Datasets)
78
+ return self._datasets
79
+
80
+ ###########
81
+ # methods #
82
+ ###########
83
+
84
+ def set_items_entity(self, entity):
85
+ """
86
+ Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
87
+
88
+ :param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
89
+ """
90
+ if entity in [entities.Item, entities.Artifact, entities.Codebase]:
91
+ self.items_entity = entity
92
+ else:
93
+ raise exceptions.PlatformException(error="403",
94
+ message="Unable to set given entity. Entity give: {}".format(entity))
95
+
96
+ def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
97
+ """
98
+ Get all items in dataset.
99
+
100
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
101
+
102
+ :param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
103
+ :return: list of all items
104
+ :rtype: list
105
+
106
+ **Example**:
107
+
108
+ .. code-block:: python
109
+
110
+ dataset.items.get_all_items()
111
+
112
+ """
113
+ if filters is None:
114
+ filters = entities.Filters()
115
+ filters._user_query = 'false'
116
+ filters.add(field='type', values='file')
117
+ pages = self.list(filters=filters)
118
+ num_items = pages.items_count
119
+ items = [None for _ in range(num_items)]
120
+ for i_item, item in enumerate(pages.all()):
121
+ items[i_item] = item
122
+ items = [item for item in items if item is not None]
123
+ return items
124
+
125
+ def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
126
+ pool = self._client_api.thread_pools(pool_name='entity.create')
127
+ jobs = [None for _ in range(len(response_items))]
128
+ # return triggers list
129
+ for i_item, item in enumerate(response_items):
130
+ jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
131
+ **{'client_api': self._client_api,
132
+ '_json': item,
133
+ 'dataset': self.dataset})
134
+ # get all results
135
+ results = [j.result() for j in jobs]
136
+ # log errors
137
+ _ = [logger.warning(r[1]) for r in results if r[0] is False]
138
+ # return good jobs
139
+ items = miscellaneous.List([r[1] for r in results if r[0] is True])
140
+ return items
141
+
142
+ def _list(self, filters: entities.Filters):
143
+ """
144
+ Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
145
+ user is expected to perform another request then for every folder item to actually get the its item list.
146
+
147
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
148
+ :return: json response
149
+ """
150
+ # prepare request
151
+ success, response = self._client_api.gen_request(req_type="POST",
152
+ path="/datasets/{}/query".format(self.dataset.id),
153
+ json_req=filters.prepare(),
154
+ headers={'user_query': filters._user_query})
155
+ if not success:
156
+ raise exceptions.PlatformException(response)
157
+ return response.json()
158
+
159
+ @_api_reference.add(path='/datasets/{id}/query', method='post')
160
+ def list(self,
161
+ filters: entities.Filters = None,
162
+ page_offset: int = None,
163
+ page_size: int = None
164
+ ) -> entities.PagedEntities:
165
+ """
166
+ List items in a dataset.
167
+
168
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
169
+
170
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
171
+ :param int page_offset: start page
172
+ :param int page_size: page size
173
+ :return: Pages object
174
+ :rtype: dtlpy.entities.paged_entities.PagedEntities
175
+
176
+ **Example**:
177
+
178
+ .. code-block:: python
179
+
180
+ dataset.items.list(page_offset=0, page_size=100)
181
+ """
182
+ # default filters
183
+ if filters is None:
184
+ filters = entities.Filters()
185
+ filters._user_query = 'false'
186
+ # assert type filters
187
+ elif not isinstance(filters, entities.Filters):
188
+ raise exceptions.PlatformException(error='400',
189
+ message='Unknown filters type: {!r}'.format(type(filters)))
190
+ if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
191
+ raise exceptions.PlatformException(
192
+ error='400',
193
+ message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
194
+
195
+ # page size
196
+ if page_size is not None:
197
+ filters.page_size = page_size
198
+
199
+ # page offset
200
+ if page_offset is not None:
201
+ filters.page = page_offset
202
+
203
+ if filters.resource == entities.FiltersResource.ITEM:
204
+ items_repository = self
205
+ else:
206
+ items_repository = repositories.Annotations(client_api=self._client_api,
207
+ dataset=self._dataset)
208
+
209
+ paged = entities.PagedEntities(items_repository=items_repository,
210
+ filters=filters,
211
+ page_offset=filters.page,
212
+ page_size=filters.page_size,
213
+ client_api=self._client_api)
214
+ paged.get_page()
215
+ return paged
216
+
217
+ @_api_reference.add(path='/items/{id}', method='get')
218
+ def get(self,
219
+ filepath: str = None,
220
+ item_id: str = None,
221
+ fetch: bool = None,
222
+ is_dir: bool = False
223
+ ) -> entities.Item:
224
+ """
225
+ Get Item object
226
+
227
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
228
+
229
+ :param str filepath: optional - search by remote path
230
+ :param str item_id: optional - search by id
231
+ :param bool fetch: optional - fetch entity from platform, default taken from cookie
232
+ :param bool is_dir: True if you want to get an item from dir type
233
+ :return: Item object
234
+ :rtype: dtlpy.entities.item.Item
235
+
236
+ **Example**:
237
+
238
+ .. code-block:: python
239
+
240
+ dataset.items.get(item_id='item_id')
241
+ """
242
+ if fetch is None:
243
+ fetch = self._client_api.fetch_entities
244
+
245
+ if fetch:
246
+ if item_id is not None:
247
+ success, response = self._client_api.gen_request(req_type="get",
248
+ path="/items/{}".format(item_id))
249
+ if success:
250
+ item = self.items_entity.from_json(client_api=self._client_api,
251
+ _json=response.json(),
252
+ dataset=self._dataset,
253
+ project=self._project)
254
+ # verify input filepath is same as the given id
255
+ if filepath is not None and item.filename != filepath:
256
+ logger.warning(
257
+ "Mismatch found in items.get: filepath is different then item.filename: "
258
+ "{!r} != {!r}".format(
259
+ filepath,
260
+ item.filename))
261
+ else:
262
+ raise exceptions.PlatformException(response)
263
+ elif filepath is not None:
264
+ filters = entities.Filters()
265
+ filters.pop(field='hidden')
266
+ if is_dir:
267
+ filters.add(field='type', values='dir')
268
+ filters.recursive = False
269
+ filters.add(field='filename', values=filepath)
270
+ paged_entity = self.list(filters=filters)
271
+ if len(paged_entity.items) == 0:
272
+ raise exceptions.PlatformException(error='404',
273
+ message='Item not found. filepath= "{}"'.format(filepath))
274
+ elif len(paged_entity.items) > 1:
275
+ raise exceptions.PlatformException(
276
+ error='404',
277
+ message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
278
+ else:
279
+ item = paged_entity.items[0]
280
+ else:
281
+ raise exceptions.PlatformException(error="400",
282
+ message='Must choose by at least one. "filename" or "item_id"')
283
+ else:
284
+ item = entities.Item.from_json(_json={'id': item_id,
285
+ 'filename': filepath},
286
+ client_api=self._client_api,
287
+ dataset=self._dataset,
288
+ is_fetched=False,
289
+ project=self._project)
290
+ assert isinstance(item, entities.Item)
291
+ return item
292
+
293
+ @_api_reference.add(path='/items/{id}/clone', method='post')
294
+ def clone(self,
295
+ item_id: str,
296
+ dst_dataset_id: str,
297
+ remote_filepath: str = None,
298
+ metadata: dict = None,
299
+ with_annotations: bool = True,
300
+ with_metadata: bool = True,
301
+ with_task_annotations_status: bool = False,
302
+ allow_many: bool = False,
303
+ wait: bool = True):
304
+ """
305
+ Clone item. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.
306
+
307
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
308
+
309
+ :param str item_id: item to clone
310
+ :param str dst_dataset_id: destination dataset id
311
+ :param str remote_filepath: complete filepath
312
+ :param dict metadata: new metadata to add
313
+ :param bool with_annotations: clone annotations
314
+ :param bool with_metadata: clone metadata
315
+ :param bool with_task_annotations_status: clone task annotations status
316
+ :param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
317
+ :param bool wait: wait for the command to finish
318
+ :return: Item object
319
+ :rtype: dtlpy.entities.item.Item
320
+
321
+ **Example**:
322
+
323
+ .. code-block:: python
324
+
325
+ dataset.items.clone(item_id='item_id',
326
+ dst_dataset_id='dist_dataset_id',
327
+ with_metadata=True,
328
+ with_task_annotations_status=False,
329
+ with_annotations=False)
330
+ """
331
+ if metadata is None:
332
+ metadata = dict()
333
+ payload = {"targetDatasetId": dst_dataset_id,
334
+ "remoteFileName": remote_filepath,
335
+ "metadata": metadata,
336
+ "cloneDatasetParams": {
337
+ "withItemsAnnotations": with_annotations,
338
+ "withMetadata": with_metadata,
339
+ "withTaskAnnotationsStatus": with_task_annotations_status},
340
+ "allowMany": allow_many
341
+ }
342
+ success, response = self._client_api.gen_request(req_type="post",
343
+ path="/items/{}/clone".format(item_id),
344
+ json_req=payload)
345
+ # check response
346
+ if not success:
347
+ raise exceptions.PlatformException(response)
348
+
349
+ command = entities.Command.from_json(_json=response.json(),
350
+ client_api=self._client_api)
351
+ if not wait:
352
+ return command
353
+ command = command.wait()
354
+
355
+ if 'returnedModelId' not in command.spec:
356
+ raise exceptions.PlatformException(error='400',
357
+ message="returnedModelId key is missing in command response: {}"
358
+ .format(response))
359
+ cloned_item = self.get(item_id=command.spec['returnedModelId'][0])
360
+ return cloned_item
361
+
362
+ @_api_reference.add(path='/items/{id}', method='delete')
363
+ def delete(self,
364
+ filename: str = None,
365
+ item_id: str = None,
366
+ filters: entities.Filters = None):
367
+ """
368
+ Delete item from platform.
369
+
370
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
371
+
372
+ You must provide at least ONE of the following params: item id, filename, filters.
373
+
374
+ :param str filename: optional - search item by remote path
375
+ :param str item_id: optional - search item by id
376
+ :param dtlpy.entities.filters.Filters filters: optional - delete items by filter
377
+ :return: True if success
378
+ :rtype: bool
379
+
380
+ **Example**:
381
+
382
+ .. code-block:: python
383
+
384
+ dataset.items.delete(item_id='item_id')
385
+ """
386
+ if item_id is not None:
387
+ success, response = self._client_api.gen_request(req_type="delete",
388
+ path="/items/{}".format(item_id),
389
+ )
390
+ elif filename is not None:
391
+ if not filename.startswith("/"):
392
+ filename = "/" + filename
393
+ items = self.get(filepath=filename)
394
+ if not isinstance(items, list):
395
+ items = [items]
396
+ if len(items) == 0:
397
+ raise exceptions.PlatformException("404", "Item not found")
398
+ elif len(items) > 1:
399
+ raise exceptions.PlatformException(error="404", message="More the 1 item exist by the name provided")
400
+ else:
401
+ item_id = items[0].id
402
+ success, response = self._client_api.gen_request(req_type="delete",
403
+ path="/items/{}".format(item_id))
404
+ elif filters is not None:
405
+ # prepare request
406
+ success, response = self._client_api.gen_request(req_type="POST",
407
+ path="/datasets/{}/query".format(self.dataset.id),
408
+ json_req=filters.prepare(operation='delete'))
409
+ else:
410
+ raise exceptions.PlatformException("400", "Must provide item id, filename or filters")
411
+
412
+ # check response
413
+ if success:
414
+ logger.debug("Item/s deleted successfully")
415
+ return success
416
+ else:
417
+ raise exceptions.PlatformException(response)
418
+
419
+ @_api_reference.add(path='/items/{id}', method='patch')
420
+ def update(self,
421
+ item: entities.Item = None,
422
+ filters: entities.Filters = None,
423
+ update_values=None,
424
+ system_update_values=None,
425
+ system_metadata: bool = False):
426
+ """
427
+ Update item metadata.
428
+
429
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
430
+
431
+ You must provide at least ONE of the following params: update_values, system_update_values.
432
+
433
+ :param dtlpy.entities.item.Item item: Item object
434
+ :param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
435
+ :param update_values: optional field to be updated and new values
436
+ :param system_update_values: values in system metadata to be updated
437
+ :param bool system_metadata: True, if you want to update the metadata system
438
+ :return: Item object
439
+ :rtype: dtlpy.entities.item.Item
440
+
441
+ **Example**:
442
+
443
+ .. code-block:: python
444
+
445
+ dataset.items.update(item='item_entity')
446
+ """
447
+ ref = filters is not None and (filters._ref_task or filters._ref_assignment)
448
+
449
+ if system_update_values and not system_metadata:
450
+ logger.warning('system metadata will not be updated because param system_metadata is False')
451
+
452
+ # check params
453
+ if item is None and filters is None:
454
+ raise exceptions.PlatformException('400', 'must provide either item or filters')
455
+
456
+ value_to_update = update_values or system_update_values
457
+
458
+ if item is None and not ref and not value_to_update:
459
+ raise exceptions.PlatformException('400',
460
+ 'Must provide update_values or system_update_values')
461
+
462
+ if item is not None and value_to_update:
463
+ raise exceptions.PlatformException('400',
464
+ 'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
465
+ 'These parameters are intended only for bulk updates using filters.')
466
+
467
+ # update item
468
+ if item is not None:
469
+ json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
470
+ modified=item.to_json())
471
+ if not json_req:
472
+ return item
473
+ url_path = "/items/{}".format(item.id)
474
+ if system_metadata:
475
+ url_path += "?system=true"
476
+ success, response = self._client_api.gen_request(req_type="patch",
477
+ path=url_path,
478
+ json_req=json_req)
479
+ if success:
480
+ logger.debug("Item was updated successfully. Item id: {}".format(item.id))
481
+ return self.items_entity.from_json(client_api=self._client_api,
482
+ _json=response.json(),
483
+ dataset=self._dataset)
484
+ else:
485
+ logger.error("Error while updating item")
486
+ raise exceptions.PlatformException(response)
487
+ # update by filters
488
+ else:
489
+ # prepare request
490
+ prepared_filter = filters.prepare(operation='update',
491
+ system_update=system_update_values,
492
+ system_metadata=system_metadata,
493
+ update=update_values)
494
+ success, response = self._client_api.gen_request(req_type="POST",
495
+ path="/datasets/{}/query".format(self.dataset.id),
496
+ json_req=prepared_filter)
497
+ if not success:
498
+ raise exceptions.PlatformException(response)
499
+ else:
500
+ logger.debug("Items were updated successfully.")
501
+ return response.json()
502
+
503
+ def download(
504
+ self,
505
+ filters: entities.Filters = None,
506
+ items=None,
507
+ # download options
508
+ local_path: str = None,
509
+ file_types: list = None,
510
+ save_locally: bool = True,
511
+ to_array: bool = False,
512
+ annotation_options: entities.ViewAnnotationOptions = None,
513
+ annotation_filters: entities.Filters = None,
514
+ overwrite: bool = False,
515
+ to_items_folder: bool = True,
516
+ thickness: int = 1,
517
+ with_text: bool = False,
518
+ without_relative_path=None,
519
+ avoid_unnecessary_annotation_download: bool = False,
520
+ include_annotations_in_output: bool = True,
521
+ export_png_files: bool = False,
522
+ filter_output_annotations: bool = False,
523
+ alpha: float = 1,
524
+ export_version=entities.ExportVersion.V1,
525
+ dataset_lock: bool = False,
526
+ lock_timeout_sec: int = None,
527
+ export_summary: bool = False,
528
+ raise_on_error: bool = False,
529
+ ):
530
+ """
531
+ Download dataset items by filters.
532
+
533
+ Filters the dataset for items and saves them locally.
534
+
535
+ Optional -- download annotation, mask, instance, and image mask of the item.
536
+
537
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
538
+
539
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
540
+ :param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
541
+ :param str local_path: local folder or filename to save to.
542
+ :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
543
+ :param bool save_locally: bool. save to disk or return a buffer
544
+ :param bool to_array: returns Ndarray when True and local_path = False
545
+ :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
546
+ :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
547
+ :param bool overwrite: optional - default = False
548
+ :param bool dataset_lock: optional - default = False
549
+ :param bool export_summary: optional - default = False
550
+ :param int lock_timeout_sec: optional
551
+ :param bool to_items_folder: Create 'items' folder and download items to it
552
+ :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
553
+ :param bool with_text: optional - add text to annotations, default = False
554
+ :param bool without_relative_path: bool - download items without the relative path from platform
555
+ :param bool avoid_unnecessary_annotation_download: default - False
556
+ :param bool include_annotations_in_output: default - False , if export should contain annotations
557
+ :param bool export_png_files: default - if True, semantic annotations should be exported as png files
558
+ :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
559
+ :param float alpha: opacity value [0 1], default 1
560
+ :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
561
+ :param bool raise_on_error: raise an exception if an error occurs
562
+ :return: generator of local_path per each downloaded item
563
+ :rtype: generator or single item
564
+
565
+ **Example**:
566
+
567
+ .. code-block:: python
568
+
569
+ dataset.items.download(local_path='local_path',
570
+ annotation_options=dl.ViewAnnotationOptions,
571
+ overwrite=False,
572
+ thickness=1,
573
+ with_text=False,
574
+ alpha=1,
575
+ save_locally=True
576
+ )
577
+ """
578
+ downloader = repositories.Downloader(self)
579
+ return downloader.download(
580
+ filters=filters,
581
+ items=items,
582
+ local_path=local_path,
583
+ file_types=file_types,
584
+ save_locally=save_locally,
585
+ to_array=to_array,
586
+ annotation_options=annotation_options,
587
+ annotation_filters=annotation_filters,
588
+ overwrite=overwrite,
589
+ to_items_folder=to_items_folder,
590
+ thickness=thickness,
591
+ alpha=alpha,
592
+ with_text=with_text,
593
+ without_relative_path=without_relative_path,
594
+ avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
595
+ include_annotations_in_output=include_annotations_in_output,
596
+ export_png_files=export_png_files,
597
+ filter_output_annotations=filter_output_annotations,
598
+ export_version=export_version,
599
+ dataset_lock=dataset_lock,
600
+ lock_timeout_sec=lock_timeout_sec,
601
+ export_summary=export_summary,
602
+ raise_on_error=raise_on_error
603
+ )
604
+
605
+ def upload(
606
+ self,
607
+ # what to upload
608
+ local_path: str,
609
+ local_annotations_path: str = None,
610
+ # upload options
611
+ remote_path: str = "/",
612
+ remote_name: str = None,
613
+ file_types: list = None,
614
+ overwrite: bool = False,
615
+ item_metadata: dict = None,
616
+ output_entity=entities.Item,
617
+ no_output: bool = False,
618
+ export_version: str = entities.ExportVersion.V1,
619
+ item_description: str = None,
620
+ raise_on_error: bool = False,
621
+ return_as_list: bool = False
622
+ ):
623
+ """
624
+ Upload local file to dataset.
625
+ Local filesystem will remain unchanged.
626
+ If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.
627
+
628
+ **Prerequisites**: Any user can upload items.
629
+
630
+ :param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
631
+ :param str local_annotations_path: path to dataloop format annotations json files.
632
+ :param str remote_path: remote path to save.
633
+ :param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
634
+ :param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
635
+ :param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
636
+ :param bool overwrite: optional - default = False
637
+ :param output_entity: output type
638
+ :param bool no_output: do not return the items after upload
639
+ :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
640
+ :param str item_description: add a string description to the uploaded item
641
+ :param bool raise_on_error: raise an exception if an error occurs
642
+ :param bool return_as_list: return a list of items instead of a generator
643
+
644
+ :return: Output (generator/single item)
645
+ :rtype: generator or single item
646
+
647
+ **Example**:
648
+
649
+ .. code-block:: python
650
+
651
+ dataset.items.upload(local_path='local_path',
652
+ local_annotations_path='local_annotations_path',
653
+ overwrite=True,
654
+ item_metadata={'Hellow': 'Word'}
655
+ )
656
+ """
657
+ # initiate and use uploader
658
+ uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
659
+ return uploader.upload(
660
+ local_path=local_path,
661
+ local_annotations_path=local_annotations_path,
662
+ # upload options
663
+ remote_path=remote_path,
664
+ remote_name=remote_name,
665
+ file_types=file_types,
666
+ # config
667
+ overwrite=overwrite,
668
+ # metadata to upload with items
669
+ item_metadata=item_metadata,
670
+ export_version=export_version,
671
+ item_description=item_description,
672
+ raise_on_error=raise_on_error,
673
+ return_as_list=return_as_list
674
+ )
675
+
676
+ @property
677
+ def platform_url(self):
678
+ return self._client_api._get_resource_url(
679
+ "projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id))
680
+
681
+ def open_in_web(self, filepath=None, item_id=None, item=None):
682
+ """
683
+ Open the item in web platform
684
+
685
+ **Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.
686
+
687
+ :param str filepath: item file path
688
+ :param str item_id: item id
689
+ :param dtlpy.entities.item.Item item: item entity
690
+
691
+ **Example**:
692
+
693
+ .. code-block:: python
694
+
695
+ dataset.items.open_in_web(item_id='item_id')
696
+
697
+ """
698
+ if filepath is not None:
699
+ item = self.get(filepath=filepath)
700
+ if item is not None:
701
+ item.open_in_web()
702
+ elif item_id is not None:
703
+ self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
704
+ else:
705
+ self._client_api._open_in_web(url=self.platform_url)
706
+
707
+ def update_status(self,
708
+ status: entities.ItemStatus,
709
+ items=None,
710
+ item_ids=None,
711
+ filters=None,
712
+ dataset=None,
713
+ clear=False):
714
+ """
715
+ Update item status in task
716
+
717
+ **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.
718
+
719
+ You must provide at least ONE of the following params: items, item_ids, filters.
720
+
721
+ :param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
722
+ :param list items: list of items
723
+ :param list item_ids: list of items id
724
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
725
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
726
+ :param bool clear: to delete status
727
+
728
+ **Example**:
729
+
730
+ .. code-block:: python
731
+
732
+ dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)
733
+
734
+ """
735
+ if items is None and item_ids is None and filters is None:
736
+ raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')
737
+
738
+ if self._dataset is None and dataset is None:
739
+ raise exceptions.PlatformException('400', 'Please provide dataset')
740
+ elif dataset is None:
741
+ dataset = self._dataset
742
+
743
+ if filters is not None:
744
+ items = dataset.items.list(filters=filters)
745
+ item_count = items.items_count
746
+ elif items is not None:
747
+ if isinstance(items, entities.PagedEntities):
748
+ item_count = items.items_count
749
+ else:
750
+ if not isinstance(items, list):
751
+ items = [items]
752
+ item_count = len(items)
753
+ items = [items]
754
+ else:
755
+ if not isinstance(item_ids, list):
756
+ item_ids = [item_ids]
757
+ item_count = len(item_ids)
758
+ items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]
759
+
760
+ pool = self._client_api.thread_pools(pool_name='item.status_update')
761
+ jobs = [None for _ in range(item_count)]
762
+ # call multiprocess wrapper to run service on each item in list
763
+ for page in items:
764
+ for i_item, item in enumerate(page):
765
+ jobs[i_item] = pool.submit(item.update_status,
766
+ **{'status': status,
767
+ 'clear': clear})
768
+
769
+ # get all results
770
+ results = [j.result() for j in jobs]
771
+ out_success = [r for r in results if r is True]
772
+ out_errors = [r for r in results if r is False]
773
+ if len(out_errors) == 0:
774
+ logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
775
+ else:
776
+ logger.error(out_errors)
777
+ logger.error('Item/s updated with {} errors'.format(len(out_errors)))
778
+
779
+ def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
780
+ """
781
+ Create a directory in a dataset.
782
+
783
+ **Prerequisites**: All users.
784
+
785
+ :param str directory: name of directory
786
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
787
+ :return: Item object
788
+ :rtype: dtlpy.entities.item.Item
789
+
790
+ **Example**:
791
+
792
+ .. code-block:: python
793
+
794
+ dataset.items.make_dir(directory='directory_name')
795
+ """
796
+ if self._dataset_id is None and dataset is None:
797
+ raise exceptions.PlatformException('400', 'Please provide parameter dataset')
798
+
799
+ payload = {
800
+ 'type': 'dir',
801
+ 'path': directory
802
+ }
803
+ headers = {'content-type': 'application/x-www-form-urlencoded'}
804
+ success, response = self._client_api.gen_request(req_type="post",
805
+ headers=headers,
806
+ path="/datasets/{}/items".format(self._dataset_id),
807
+ data=payload)
808
+ if success:
809
+ item = self.items_entity.from_json(client_api=self._client_api,
810
+ _json=response.json(),
811
+ dataset=self._dataset)
812
+ else:
813
+ raise exceptions.PlatformException(response)
814
+
815
+ return item
816
+
817
+ def move_items(self,
818
+ destination: str,
819
+ filters: entities.Filters = None,
820
+ items=None,
821
+ dataset: entities.Dataset = None
822
+ ) -> bool:
823
+ """
824
+ Move items to another directory.
825
+ If directory does not exist we will create it
826
+
827
+ **Prerequisites**: You must be in the role of an *owner* or *developer*.
828
+
829
+ :param str destination: destination directory
830
+ :param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
831
+ :param items: optional - either this or filters. A list of items to move
832
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
833
+ :return: True if success
834
+ :rtype: bool
835
+
836
+ **Example**:
837
+
838
+ .. code-block:: python
839
+
840
+ dataset.items.move_items(destination='directory_name')
841
+ """
842
+ if filters is None and items is None:
843
+ raise exceptions.PlatformException('400', 'Must provide either filters or items')
844
+
845
+ dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
846
+ dest_dir_filter.recursive = False
847
+ dest_dir_filter.add(field='filename', values=destination)
848
+ dirs_page = self.list(filters=dest_dir_filter)
849
+
850
+ if dirs_page.items_count == 0:
851
+ directory = self.make_dir(directory=destination, dataset=dataset)
852
+ elif dirs_page.items_count == 1:
853
+ directory = dirs_page.items[0]
854
+ else:
855
+ raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))
856
+
857
+ if filters is not None:
858
+ items = self.list(filters=filters)
859
+ elif isinstance(items, list):
860
+ items = [items]
861
+ elif not isinstance(items, entities.PagedEntities):
862
+ raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
863
+ type(items)))
864
+
865
+ item_ids = list()
866
+ for page in items:
867
+ for item in page:
868
+ item_ids.append(item.id)
869
+
870
+ success, response = self._client_api.gen_request(req_type="put",
871
+ path="/datasets/{}/items/{}".format(self._dataset_id,
872
+ directory.id),
873
+ json_req=item_ids)
874
+ if not success:
875
+ raise exceptions.PlatformException(response)
876
+
877
+ return success
878
+
879
    def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
        """
        Get item score

        **Prerequisites**: You must be able to read the task

        :param str item_id: item id
        :param str task_id: task id
        :param int page_offset: start page
        :param int page_size: page size
        :return: page of item scores

        **Example**:

        .. code-block:: python

            dataset.items.task_scores(item_id='item_id', task_id='task_id')

        """

        # validate required identifiers before hitting the platform
        if item_id is None:
            raise exceptions.PlatformException('400', 'Must provide item id')

        if task_id is None:
            raise exceptions.PlatformException('400', 'Must provide task id')

        # paging is passed through as query-string parameters
        success, response = self._client_api.gen_request(req_type="get",
                                                         path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
                                                         .format(task_id, item_id, page_offset, page_size))
        if success:
            return response.json()
        else:
            raise exceptions.PlatformException(response)
912
+