dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +152 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +975 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +974 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1287 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1585 -1504
- dtlpy/repositories/downloader.py +1157 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +256 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +429 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1786 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
- dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
- dtlpy-1.117.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
dtlpy/repositories/items.py
CHANGED
|
@@ -1,912 +1,912 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
|
|
3
|
-
from .. import entities, exceptions, repositories, miscellaneous, _api_reference
|
|
4
|
-
from ..services.api_client import ApiClient
|
|
5
|
-
|
|
6
|
-
logger = logging.getLogger(name='dtlpy')
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
class Items:
|
|
10
|
-
"""
|
|
11
|
-
Items Repository
|
|
12
|
-
|
|
13
|
-
The Items class allows you to manage items in your datasets.
|
|
14
|
-
For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
|
|
15
|
-
"""
|
|
16
|
-
|
|
17
|
-
def __init__(self,
|
|
18
|
-
client_api: ApiClient,
|
|
19
|
-
datasets: repositories.Datasets = None,
|
|
20
|
-
dataset: entities.Dataset = None,
|
|
21
|
-
dataset_id=None,
|
|
22
|
-
items_entity=None,
|
|
23
|
-
project=None):
|
|
24
|
-
self._client_api = client_api
|
|
25
|
-
self._dataset = dataset
|
|
26
|
-
self._dataset_id = dataset_id
|
|
27
|
-
self._datasets = datasets
|
|
28
|
-
self._project = project
|
|
29
|
-
# set items entity to represent the item (Item, Codebase, Artifact etc...)
|
|
30
|
-
if items_entity is None:
|
|
31
|
-
self.items_entity = entities.Item
|
|
32
|
-
if self._dataset_id is None and self._dataset is not None:
|
|
33
|
-
self._dataset_id = self._dataset.id
|
|
34
|
-
|
|
35
|
-
############
|
|
36
|
-
# entities #
|
|
37
|
-
############
|
|
38
|
-
@property
|
|
39
|
-
def dataset(self) -> entities.Dataset:
|
|
40
|
-
if self._dataset is None:
|
|
41
|
-
if self._dataset_id is None:
|
|
42
|
-
raise exceptions.PlatformException(
|
|
43
|
-
error='400',
|
|
44
|
-
message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
|
|
45
|
-
self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
|
|
46
|
-
assert isinstance(self._dataset, entities.Dataset)
|
|
47
|
-
return self._dataset
|
|
48
|
-
|
|
49
|
-
@dataset.setter
|
|
50
|
-
def dataset(self, dataset: entities.Dataset):
|
|
51
|
-
if not isinstance(dataset, entities.Dataset):
|
|
52
|
-
raise ValueError('Must input a valid Dataset entity')
|
|
53
|
-
self._dataset = dataset
|
|
54
|
-
|
|
55
|
-
@property
|
|
56
|
-
def project(self) -> entities.Project:
|
|
57
|
-
if self._project is None:
|
|
58
|
-
raise exceptions.PlatformException(
|
|
59
|
-
error='400',
|
|
60
|
-
message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
|
|
61
|
-
assert isinstance(self._dataset, entities.Dataset)
|
|
62
|
-
return self._project
|
|
63
|
-
|
|
64
|
-
@project.setter
|
|
65
|
-
def project(self, project: entities.Project):
|
|
66
|
-
if not isinstance(project, entities.Project):
|
|
67
|
-
raise ValueError('Must input a valid Dataset entity')
|
|
68
|
-
self._project = project
|
|
69
|
-
|
|
70
|
-
################
|
|
71
|
-
# repositories #
|
|
72
|
-
################
|
|
73
|
-
@property
|
|
74
|
-
def datasets(self) -> repositories.Datasets:
|
|
75
|
-
if self._datasets is None:
|
|
76
|
-
self._datasets = repositories.Datasets(client_api=self._client_api)
|
|
77
|
-
assert isinstance(self._datasets, repositories.Datasets)
|
|
78
|
-
return self._datasets
|
|
79
|
-
|
|
80
|
-
###########
|
|
81
|
-
# methods #
|
|
82
|
-
###########
|
|
83
|
-
|
|
84
|
-
def set_items_entity(self, entity):
|
|
85
|
-
"""
|
|
86
|
-
Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
|
|
87
|
-
|
|
88
|
-
:param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
|
|
89
|
-
"""
|
|
90
|
-
if entity in [entities.Item, entities.Artifact, entities.Codebase]:
|
|
91
|
-
self.items_entity = entity
|
|
92
|
-
else:
|
|
93
|
-
raise exceptions.PlatformException(error="403",
|
|
94
|
-
message="Unable to set given entity. Entity give: {}".format(entity))
|
|
95
|
-
|
|
96
|
-
def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
|
|
97
|
-
"""
|
|
98
|
-
Get all items in dataset.
|
|
99
|
-
|
|
100
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
101
|
-
|
|
102
|
-
:param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
|
|
103
|
-
:return: list of all items
|
|
104
|
-
:rtype: list
|
|
105
|
-
|
|
106
|
-
**Example**:
|
|
107
|
-
|
|
108
|
-
.. code-block:: python
|
|
109
|
-
|
|
110
|
-
dataset.items.get_all_items()
|
|
111
|
-
|
|
112
|
-
"""
|
|
113
|
-
if filters is None:
|
|
114
|
-
filters = entities.Filters()
|
|
115
|
-
filters._user_query = 'false'
|
|
116
|
-
filters.add(field='type', values='file')
|
|
117
|
-
pages = self.list(filters=filters)
|
|
118
|
-
num_items = pages.items_count
|
|
119
|
-
items = [None for _ in range(num_items)]
|
|
120
|
-
for i_item, item in enumerate(pages.all()):
|
|
121
|
-
items[i_item] = item
|
|
122
|
-
items = [item for item in items if item is not None]
|
|
123
|
-
return items
|
|
124
|
-
|
|
125
|
-
def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
|
|
126
|
-
pool = self._client_api.thread_pools(pool_name='entity.create')
|
|
127
|
-
jobs = [None for _ in range(len(response_items))]
|
|
128
|
-
# return triggers list
|
|
129
|
-
for i_item, item in enumerate(response_items):
|
|
130
|
-
jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
|
|
131
|
-
**{'client_api': self._client_api,
|
|
132
|
-
'_json': item,
|
|
133
|
-
'dataset': self.dataset})
|
|
134
|
-
# get all results
|
|
135
|
-
results = [j.result() for j in jobs]
|
|
136
|
-
# log errors
|
|
137
|
-
_ = [logger.warning(r[1]) for r in results if r[0] is False]
|
|
138
|
-
# return good jobs
|
|
139
|
-
items = miscellaneous.List([r[1] for r in results if r[0] is True])
|
|
140
|
-
return items
|
|
141
|
-
|
|
142
|
-
def _list(self, filters: entities.Filters):
|
|
143
|
-
"""
|
|
144
|
-
Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
|
|
145
|
-
user is expected to perform another request then for every folder item to actually get the its item list.
|
|
146
|
-
|
|
147
|
-
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
148
|
-
:return: json response
|
|
149
|
-
"""
|
|
150
|
-
# prepare request
|
|
151
|
-
success, response = self._client_api.gen_request(req_type="POST",
|
|
152
|
-
path="/datasets/{}/query".format(self.dataset.id),
|
|
153
|
-
json_req=filters.prepare(),
|
|
154
|
-
headers={'user_query': filters._user_query})
|
|
155
|
-
if not success:
|
|
156
|
-
raise exceptions.PlatformException(response)
|
|
157
|
-
return response.json()
|
|
158
|
-
|
|
159
|
-
@_api_reference.add(path='/datasets/{id}/query', method='post')
|
|
160
|
-
def list(self,
|
|
161
|
-
filters: entities.Filters = None,
|
|
162
|
-
page_offset: int = None,
|
|
163
|
-
page_size: int = None
|
|
164
|
-
) -> entities.PagedEntities:
|
|
165
|
-
"""
|
|
166
|
-
List items in a dataset.
|
|
167
|
-
|
|
168
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
169
|
-
|
|
170
|
-
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
171
|
-
:param int page_offset: start page
|
|
172
|
-
:param int page_size: page size
|
|
173
|
-
:return: Pages object
|
|
174
|
-
:rtype: dtlpy.entities.paged_entities.PagedEntities
|
|
175
|
-
|
|
176
|
-
**Example**:
|
|
177
|
-
|
|
178
|
-
.. code-block:: python
|
|
179
|
-
|
|
180
|
-
dataset.items.list(page_offset=0, page_size=100)
|
|
181
|
-
"""
|
|
182
|
-
# default filters
|
|
183
|
-
if filters is None:
|
|
184
|
-
filters = entities.Filters()
|
|
185
|
-
filters._user_query = 'false'
|
|
186
|
-
# assert type filters
|
|
187
|
-
elif not isinstance(filters, entities.Filters):
|
|
188
|
-
raise exceptions.PlatformException(error='400',
|
|
189
|
-
message='Unknown filters type: {!r}'.format(type(filters)))
|
|
190
|
-
if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
|
|
191
|
-
raise exceptions.PlatformException(
|
|
192
|
-
error='400',
|
|
193
|
-
message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
|
|
194
|
-
|
|
195
|
-
# page size
|
|
196
|
-
if page_size is not None:
|
|
197
|
-
filters.page_size = page_size
|
|
198
|
-
|
|
199
|
-
# page offset
|
|
200
|
-
if page_offset is not None:
|
|
201
|
-
filters.page = page_offset
|
|
202
|
-
|
|
203
|
-
if filters.resource == entities.FiltersResource.ITEM:
|
|
204
|
-
items_repository = self
|
|
205
|
-
else:
|
|
206
|
-
items_repository = repositories.Annotations(client_api=self._client_api,
|
|
207
|
-
dataset=self._dataset)
|
|
208
|
-
|
|
209
|
-
paged = entities.PagedEntities(items_repository=items_repository,
|
|
210
|
-
filters=filters,
|
|
211
|
-
page_offset=filters.page,
|
|
212
|
-
page_size=filters.page_size,
|
|
213
|
-
client_api=self._client_api)
|
|
214
|
-
paged.get_page()
|
|
215
|
-
return paged
|
|
216
|
-
|
|
217
|
-
@_api_reference.add(path='/items/{id}', method='get')
|
|
218
|
-
def get(self,
|
|
219
|
-
filepath: str = None,
|
|
220
|
-
item_id: str = None,
|
|
221
|
-
fetch: bool = None,
|
|
222
|
-
is_dir: bool = False
|
|
223
|
-
) -> entities.Item:
|
|
224
|
-
"""
|
|
225
|
-
Get Item object
|
|
226
|
-
|
|
227
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
228
|
-
|
|
229
|
-
:param str filepath: optional - search by remote path
|
|
230
|
-
:param str item_id: optional - search by id
|
|
231
|
-
:param bool fetch: optional - fetch entity from platform, default taken from cookie
|
|
232
|
-
:param bool is_dir: True if you want to get an item from dir type
|
|
233
|
-
:return: Item object
|
|
234
|
-
:rtype: dtlpy.entities.item.Item
|
|
235
|
-
|
|
236
|
-
**Example**:
|
|
237
|
-
|
|
238
|
-
.. code-block:: python
|
|
239
|
-
|
|
240
|
-
dataset.items.get(item_id='item_id')
|
|
241
|
-
"""
|
|
242
|
-
if fetch is None:
|
|
243
|
-
fetch = self._client_api.fetch_entities
|
|
244
|
-
|
|
245
|
-
if fetch:
|
|
246
|
-
if item_id is not None:
|
|
247
|
-
success, response = self._client_api.gen_request(req_type="get",
|
|
248
|
-
path="/items/{}".format(item_id))
|
|
249
|
-
if success:
|
|
250
|
-
item = self.items_entity.from_json(client_api=self._client_api,
|
|
251
|
-
_json=response.json(),
|
|
252
|
-
dataset=self._dataset,
|
|
253
|
-
project=self._project)
|
|
254
|
-
# verify input filepath is same as the given id
|
|
255
|
-
if filepath is not None and item.filename != filepath:
|
|
256
|
-
logger.warning(
|
|
257
|
-
"Mismatch found in items.get: filepath is different then item.filename: "
|
|
258
|
-
"{!r} != {!r}".format(
|
|
259
|
-
filepath,
|
|
260
|
-
item.filename))
|
|
261
|
-
else:
|
|
262
|
-
raise exceptions.PlatformException(response)
|
|
263
|
-
elif filepath is not None:
|
|
264
|
-
filters = entities.Filters()
|
|
265
|
-
filters.pop(field='hidden')
|
|
266
|
-
if is_dir:
|
|
267
|
-
filters.add(field='type', values='dir')
|
|
268
|
-
filters.recursive = False
|
|
269
|
-
filters.add(field='filename', values=filepath)
|
|
270
|
-
paged_entity = self.list(filters=filters)
|
|
271
|
-
if len(paged_entity.items) == 0:
|
|
272
|
-
raise exceptions.PlatformException(error='404',
|
|
273
|
-
message='Item not found. filepath= "{}"'.format(filepath))
|
|
274
|
-
elif len(paged_entity.items) > 1:
|
|
275
|
-
raise exceptions.PlatformException(
|
|
276
|
-
error='404',
|
|
277
|
-
message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
|
|
278
|
-
else:
|
|
279
|
-
item = paged_entity.items[0]
|
|
280
|
-
else:
|
|
281
|
-
raise exceptions.PlatformException(error="400",
|
|
282
|
-
message='Must choose by at least one. "filename" or "item_id"')
|
|
283
|
-
else:
|
|
284
|
-
item = entities.Item.from_json(_json={'id': item_id,
|
|
285
|
-
'filename': filepath},
|
|
286
|
-
client_api=self._client_api,
|
|
287
|
-
dataset=self._dataset,
|
|
288
|
-
is_fetched=False,
|
|
289
|
-
project=self._project)
|
|
290
|
-
assert isinstance(item, entities.Item)
|
|
291
|
-
return item
|
|
292
|
-
|
|
293
|
-
@_api_reference.add(path='/items/{id}/clone', method='post')
|
|
294
|
-
def clone(self,
|
|
295
|
-
item_id: str,
|
|
296
|
-
dst_dataset_id: str,
|
|
297
|
-
remote_filepath: str = None,
|
|
298
|
-
metadata: dict = None,
|
|
299
|
-
with_annotations: bool = True,
|
|
300
|
-
with_metadata: bool = True,
|
|
301
|
-
with_task_annotations_status: bool = False,
|
|
302
|
-
allow_many: bool = False,
|
|
303
|
-
wait: bool = True):
|
|
304
|
-
"""
|
|
305
|
-
Clone item. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.
|
|
306
|
-
|
|
307
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
308
|
-
|
|
309
|
-
:param str item_id: item to clone
|
|
310
|
-
:param str dst_dataset_id: destination dataset id
|
|
311
|
-
:param str remote_filepath: complete filepath
|
|
312
|
-
:param dict metadata: new metadata to add
|
|
313
|
-
:param bool with_annotations: clone annotations
|
|
314
|
-
:param bool with_metadata: clone metadata
|
|
315
|
-
:param bool with_task_annotations_status: clone task annotations status
|
|
316
|
-
:param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
|
|
317
|
-
:param bool wait: wait for the command to finish
|
|
318
|
-
:return: Item object
|
|
319
|
-
:rtype: dtlpy.entities.item.Item
|
|
320
|
-
|
|
321
|
-
**Example**:
|
|
322
|
-
|
|
323
|
-
.. code-block:: python
|
|
324
|
-
|
|
325
|
-
dataset.items.clone(item_id='item_id',
|
|
326
|
-
dst_dataset_id='dist_dataset_id',
|
|
327
|
-
with_metadata=True,
|
|
328
|
-
with_task_annotations_status=False,
|
|
329
|
-
with_annotations=False)
|
|
330
|
-
"""
|
|
331
|
-
if metadata is None:
|
|
332
|
-
metadata = dict()
|
|
333
|
-
payload = {"targetDatasetId": dst_dataset_id,
|
|
334
|
-
"remoteFileName": remote_filepath,
|
|
335
|
-
"metadata": metadata,
|
|
336
|
-
"cloneDatasetParams": {
|
|
337
|
-
"withItemsAnnotations": with_annotations,
|
|
338
|
-
"withMetadata": with_metadata,
|
|
339
|
-
"withTaskAnnotationsStatus": with_task_annotations_status},
|
|
340
|
-
"allowMany": allow_many
|
|
341
|
-
}
|
|
342
|
-
success, response = self._client_api.gen_request(req_type="post",
|
|
343
|
-
path="/items/{}/clone".format(item_id),
|
|
344
|
-
json_req=payload)
|
|
345
|
-
# check response
|
|
346
|
-
if not success:
|
|
347
|
-
raise exceptions.PlatformException(response)
|
|
348
|
-
|
|
349
|
-
command = entities.Command.from_json(_json=response.json(),
|
|
350
|
-
client_api=self._client_api)
|
|
351
|
-
if not wait:
|
|
352
|
-
return command
|
|
353
|
-
command = command.wait()
|
|
354
|
-
|
|
355
|
-
if 'returnedModelId' not in command.spec:
|
|
356
|
-
raise exceptions.PlatformException(error='400',
|
|
357
|
-
message="returnedModelId key is missing in command response: {}"
|
|
358
|
-
.format(response))
|
|
359
|
-
cloned_item = self.get(item_id=command.spec['returnedModelId'][0])
|
|
360
|
-
return cloned_item
|
|
361
|
-
|
|
362
|
-
@_api_reference.add(path='/items/{id}', method='delete')
|
|
363
|
-
def delete(self,
|
|
364
|
-
filename: str = None,
|
|
365
|
-
item_id: str = None,
|
|
366
|
-
filters: entities.Filters = None):
|
|
367
|
-
"""
|
|
368
|
-
Delete item from platform.
|
|
369
|
-
|
|
370
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
371
|
-
|
|
372
|
-
You must provide at least ONE of the following params: item id, filename, filters.
|
|
373
|
-
|
|
374
|
-
:param str filename: optional - search item by remote path
|
|
375
|
-
:param str item_id: optional - search item by id
|
|
376
|
-
:param dtlpy.entities.filters.Filters filters: optional - delete items by filter
|
|
377
|
-
:return: True if success
|
|
378
|
-
:rtype: bool
|
|
379
|
-
|
|
380
|
-
**Example**:
|
|
381
|
-
|
|
382
|
-
.. code-block:: python
|
|
383
|
-
|
|
384
|
-
dataset.items.delete(item_id='item_id')
|
|
385
|
-
"""
|
|
386
|
-
if item_id is not None:
|
|
387
|
-
success, response = self._client_api.gen_request(req_type="delete",
|
|
388
|
-
path="/items/{}".format(item_id),
|
|
389
|
-
)
|
|
390
|
-
elif filename is not None:
|
|
391
|
-
if not filename.startswith("/"):
|
|
392
|
-
filename = "/" + filename
|
|
393
|
-
items = self.get(filepath=filename)
|
|
394
|
-
if not isinstance(items, list):
|
|
395
|
-
items = [items]
|
|
396
|
-
if len(items) == 0:
|
|
397
|
-
raise exceptions.PlatformException("404", "Item not found")
|
|
398
|
-
elif len(items) > 1:
|
|
399
|
-
raise exceptions.PlatformException(error="404", message="More the 1 item exist by the name provided")
|
|
400
|
-
else:
|
|
401
|
-
item_id = items[0].id
|
|
402
|
-
success, response = self._client_api.gen_request(req_type="delete",
|
|
403
|
-
path="/items/{}".format(item_id))
|
|
404
|
-
elif filters is not None:
|
|
405
|
-
# prepare request
|
|
406
|
-
success, response = self._client_api.gen_request(req_type="POST",
|
|
407
|
-
path="/datasets/{}/query".format(self.dataset.id),
|
|
408
|
-
json_req=filters.prepare(operation='delete'))
|
|
409
|
-
else:
|
|
410
|
-
raise exceptions.PlatformException("400", "Must provide item id, filename or filters")
|
|
411
|
-
|
|
412
|
-
# check response
|
|
413
|
-
if success:
|
|
414
|
-
logger.debug("Item/s deleted successfully")
|
|
415
|
-
return success
|
|
416
|
-
else:
|
|
417
|
-
raise exceptions.PlatformException(response)
|
|
418
|
-
|
|
419
|
-
@_api_reference.add(path='/items/{id}', method='patch')
|
|
420
|
-
def update(self,
|
|
421
|
-
item: entities.Item = None,
|
|
422
|
-
filters: entities.Filters = None,
|
|
423
|
-
update_values=None,
|
|
424
|
-
system_update_values=None,
|
|
425
|
-
system_metadata: bool = False):
|
|
426
|
-
"""
|
|
427
|
-
Update item metadata.
|
|
428
|
-
|
|
429
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
430
|
-
|
|
431
|
-
You must provide at least ONE of the following params: update_values, system_update_values.
|
|
432
|
-
|
|
433
|
-
:param dtlpy.entities.item.Item item: Item object
|
|
434
|
-
:param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
|
|
435
|
-
:param update_values: optional field to be updated and new values
|
|
436
|
-
:param system_update_values: values in system metadata to be updated
|
|
437
|
-
:param bool system_metadata: True, if you want to update the metadata system
|
|
438
|
-
:return: Item object
|
|
439
|
-
:rtype: dtlpy.entities.item.Item
|
|
440
|
-
|
|
441
|
-
**Example**:
|
|
442
|
-
|
|
443
|
-
.. code-block:: python
|
|
444
|
-
|
|
445
|
-
dataset.items.update(item='item_entity')
|
|
446
|
-
"""
|
|
447
|
-
ref = filters is not None and (filters._ref_task or filters._ref_assignment)
|
|
448
|
-
|
|
449
|
-
if system_update_values and not system_metadata:
|
|
450
|
-
logger.warning('system metadata will not be updated because param system_metadata is False')
|
|
451
|
-
|
|
452
|
-
# check params
|
|
453
|
-
if item is None and filters is None:
|
|
454
|
-
raise exceptions.PlatformException('400', 'must provide either item or filters')
|
|
455
|
-
|
|
456
|
-
value_to_update = update_values or system_update_values
|
|
457
|
-
|
|
458
|
-
if item is None and not ref and not value_to_update:
|
|
459
|
-
raise exceptions.PlatformException('400',
|
|
460
|
-
'Must provide update_values or system_update_values')
|
|
461
|
-
|
|
462
|
-
if item is not None and value_to_update:
|
|
463
|
-
raise exceptions.PlatformException('400',
|
|
464
|
-
'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
|
|
465
|
-
'These parameters are intended only for bulk updates using filters.')
|
|
466
|
-
|
|
467
|
-
# update item
|
|
468
|
-
if item is not None:
|
|
469
|
-
json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
|
|
470
|
-
modified=item.to_json())
|
|
471
|
-
if not json_req:
|
|
472
|
-
return item
|
|
473
|
-
url_path = "/items/{}".format(item.id)
|
|
474
|
-
if system_metadata:
|
|
475
|
-
url_path += "?system=true"
|
|
476
|
-
success, response = self._client_api.gen_request(req_type="patch",
|
|
477
|
-
path=url_path,
|
|
478
|
-
json_req=json_req)
|
|
479
|
-
if success:
|
|
480
|
-
logger.debug("Item was updated successfully. Item id: {}".format(item.id))
|
|
481
|
-
return self.items_entity.from_json(client_api=self._client_api,
|
|
482
|
-
_json=response.json(),
|
|
483
|
-
dataset=self._dataset)
|
|
484
|
-
else:
|
|
485
|
-
logger.error("Error while updating item")
|
|
486
|
-
raise exceptions.PlatformException(response)
|
|
487
|
-
# update by filters
|
|
488
|
-
else:
|
|
489
|
-
# prepare request
|
|
490
|
-
prepared_filter = filters.prepare(operation='update',
|
|
491
|
-
system_update=system_update_values,
|
|
492
|
-
system_metadata=system_metadata,
|
|
493
|
-
update=update_values)
|
|
494
|
-
success, response = self._client_api.gen_request(req_type="POST",
|
|
495
|
-
path="/datasets/{}/query".format(self.dataset.id),
|
|
496
|
-
json_req=prepared_filter)
|
|
497
|
-
if not success:
|
|
498
|
-
raise exceptions.PlatformException(response)
|
|
499
|
-
else:
|
|
500
|
-
logger.debug("Items were updated successfully.")
|
|
501
|
-
return response.json()
|
|
502
|
-
|
|
503
|
-
def download(
|
|
504
|
-
self,
|
|
505
|
-
filters: entities.Filters = None,
|
|
506
|
-
items=None,
|
|
507
|
-
# download options
|
|
508
|
-
local_path: str = None,
|
|
509
|
-
file_types: list = None,
|
|
510
|
-
save_locally: bool = True,
|
|
511
|
-
to_array: bool = False,
|
|
512
|
-
annotation_options: entities.ViewAnnotationOptions = None,
|
|
513
|
-
annotation_filters: entities.Filters = None,
|
|
514
|
-
overwrite: bool = False,
|
|
515
|
-
to_items_folder: bool = True,
|
|
516
|
-
thickness: int = 1,
|
|
517
|
-
with_text: bool = False,
|
|
518
|
-
without_relative_path=None,
|
|
519
|
-
avoid_unnecessary_annotation_download: bool = False,
|
|
520
|
-
include_annotations_in_output: bool = True,
|
|
521
|
-
export_png_files: bool = False,
|
|
522
|
-
filter_output_annotations: bool = False,
|
|
523
|
-
alpha: float = 1,
|
|
524
|
-
export_version=entities.ExportVersion.V1,
|
|
525
|
-
dataset_lock: bool = False,
|
|
526
|
-
lock_timeout_sec: int = None,
|
|
527
|
-
export_summary: bool = False,
|
|
528
|
-
raise_on_error: bool = False,
|
|
529
|
-
):
|
|
530
|
-
"""
|
|
531
|
-
Download dataset items by filters.
|
|
532
|
-
|
|
533
|
-
Filters the dataset for items and saves them locally.
|
|
534
|
-
|
|
535
|
-
Optional -- download annotation, mask, instance, and image mask of the item.
|
|
536
|
-
|
|
537
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
538
|
-
|
|
539
|
-
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
540
|
-
:param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
|
|
541
|
-
:param str local_path: local folder or filename to save to.
|
|
542
|
-
:param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
|
|
543
|
-
:param bool save_locally: bool. save to disk or return a buffer
|
|
544
|
-
:param bool to_array: returns Ndarray when True and local_path = False
|
|
545
|
-
:param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
|
|
546
|
-
:param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
|
|
547
|
-
:param bool overwrite: optional - default = False
|
|
548
|
-
:param bool dataset_lock: optional - default = False
|
|
549
|
-
:param bool export_summary: optional - default = False
|
|
550
|
-
:param int lock_timeout_sec: optional
|
|
551
|
-
:param bool to_items_folder: Create 'items' folder and download items to it
|
|
552
|
-
:param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
|
|
553
|
-
:param bool with_text: optional - add text to annotations, default = False
|
|
554
|
-
:param bool without_relative_path: bool - download items without the relative path from platform
|
|
555
|
-
:param bool avoid_unnecessary_annotation_download: default - False
|
|
556
|
-
:param bool include_annotations_in_output: default - False , if export should contain annotations
|
|
557
|
-
:param bool export_png_files: default - if True, semantic annotations should be exported as png files
|
|
558
|
-
:param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
|
|
559
|
-
:param float alpha: opacity value [0 1], default 1
|
|
560
|
-
:param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
|
|
561
|
-
:param bool raise_on_error: raise an exception if an error occurs
|
|
562
|
-
:return: generator of local_path per each downloaded item
|
|
563
|
-
:rtype: generator or single item
|
|
564
|
-
|
|
565
|
-
**Example**:
|
|
566
|
-
|
|
567
|
-
.. code-block:: python
|
|
568
|
-
|
|
569
|
-
dataset.items.download(local_path='local_path',
|
|
570
|
-
annotation_options=dl.ViewAnnotationOptions,
|
|
571
|
-
overwrite=False,
|
|
572
|
-
thickness=1,
|
|
573
|
-
with_text=False,
|
|
574
|
-
alpha=1,
|
|
575
|
-
save_locally=True
|
|
576
|
-
)
|
|
577
|
-
"""
|
|
578
|
-
downloader = repositories.Downloader(self)
|
|
579
|
-
return downloader.download(
|
|
580
|
-
filters=filters,
|
|
581
|
-
items=items,
|
|
582
|
-
local_path=local_path,
|
|
583
|
-
file_types=file_types,
|
|
584
|
-
save_locally=save_locally,
|
|
585
|
-
to_array=to_array,
|
|
586
|
-
annotation_options=annotation_options,
|
|
587
|
-
annotation_filters=annotation_filters,
|
|
588
|
-
overwrite=overwrite,
|
|
589
|
-
to_items_folder=to_items_folder,
|
|
590
|
-
thickness=thickness,
|
|
591
|
-
alpha=alpha,
|
|
592
|
-
with_text=with_text,
|
|
593
|
-
without_relative_path=without_relative_path,
|
|
594
|
-
avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
|
|
595
|
-
include_annotations_in_output=include_annotations_in_output,
|
|
596
|
-
export_png_files=export_png_files,
|
|
597
|
-
filter_output_annotations=filter_output_annotations,
|
|
598
|
-
export_version=export_version,
|
|
599
|
-
dataset_lock=dataset_lock,
|
|
600
|
-
lock_timeout_sec=lock_timeout_sec,
|
|
601
|
-
export_summary=export_summary,
|
|
602
|
-
raise_on_error=raise_on_error
|
|
603
|
-
)
|
|
604
|
-
|
|
605
|
-
def upload(
|
|
606
|
-
self,
|
|
607
|
-
# what to upload
|
|
608
|
-
local_path: str,
|
|
609
|
-
local_annotations_path: str = None,
|
|
610
|
-
# upload options
|
|
611
|
-
remote_path: str = "/",
|
|
612
|
-
remote_name: str = None,
|
|
613
|
-
file_types: list = None,
|
|
614
|
-
overwrite: bool = False,
|
|
615
|
-
item_metadata: dict = None,
|
|
616
|
-
output_entity=entities.Item,
|
|
617
|
-
no_output: bool = False,
|
|
618
|
-
export_version: str = entities.ExportVersion.V1,
|
|
619
|
-
item_description: str = None,
|
|
620
|
-
raise_on_error: bool = False,
|
|
621
|
-
return_as_list: bool = False
|
|
622
|
-
):
|
|
623
|
-
"""
|
|
624
|
-
Upload local file to dataset.
|
|
625
|
-
Local filesystem will remain unchanged.
|
|
626
|
-
If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.
|
|
627
|
-
|
|
628
|
-
**Prerequisites**: Any user can upload items.
|
|
629
|
-
|
|
630
|
-
:param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
|
|
631
|
-
:param str local_annotations_path: path to dataloop format annotations json files.
|
|
632
|
-
:param str remote_path: remote path to save.
|
|
633
|
-
:param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
|
|
634
|
-
:param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
|
|
635
|
-
:param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
|
|
636
|
-
:param bool overwrite: optional - default = False
|
|
637
|
-
:param output_entity: output type
|
|
638
|
-
:param bool no_output: do not return the items after upload
|
|
639
|
-
:param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
|
|
640
|
-
:param str item_description: add a string description to the uploaded item
|
|
641
|
-
:param bool raise_on_error: raise an exception if an error occurs
|
|
642
|
-
:param bool return_as_list: return a list of items instead of a generator
|
|
643
|
-
|
|
644
|
-
:return: Output (generator/single item)
|
|
645
|
-
:rtype: generator or single item
|
|
646
|
-
|
|
647
|
-
**Example**:
|
|
648
|
-
|
|
649
|
-
.. code-block:: python
|
|
650
|
-
|
|
651
|
-
dataset.items.upload(local_path='local_path',
|
|
652
|
-
local_annotations_path='local_annotations_path',
|
|
653
|
-
overwrite=True,
|
|
654
|
-
item_metadata={'Hellow': 'Word'}
|
|
655
|
-
)
|
|
656
|
-
"""
|
|
657
|
-
# initiate and use uploader
|
|
658
|
-
uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
|
|
659
|
-
return uploader.upload(
|
|
660
|
-
local_path=local_path,
|
|
661
|
-
local_annotations_path=local_annotations_path,
|
|
662
|
-
# upload options
|
|
663
|
-
remote_path=remote_path,
|
|
664
|
-
remote_name=remote_name,
|
|
665
|
-
file_types=file_types,
|
|
666
|
-
# config
|
|
667
|
-
overwrite=overwrite,
|
|
668
|
-
# metadata to upload with items
|
|
669
|
-
item_metadata=item_metadata,
|
|
670
|
-
export_version=export_version,
|
|
671
|
-
item_description=item_description,
|
|
672
|
-
raise_on_error=raise_on_error,
|
|
673
|
-
return_as_list=return_as_list
|
|
674
|
-
)
|
|
675
|
-
|
|
676
|
-
@property
|
|
677
|
-
def platform_url(self):
|
|
678
|
-
return self._client_api._get_resource_url(
|
|
679
|
-
"projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id))
|
|
680
|
-
|
|
681
|
-
def open_in_web(self, filepath=None, item_id=None, item=None):
|
|
682
|
-
"""
|
|
683
|
-
Open the item in web platform
|
|
684
|
-
|
|
685
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.
|
|
686
|
-
|
|
687
|
-
:param str filepath: item file path
|
|
688
|
-
:param str item_id: item id
|
|
689
|
-
:param dtlpy.entities.item.Item item: item entity
|
|
690
|
-
|
|
691
|
-
**Example**:
|
|
692
|
-
|
|
693
|
-
.. code-block:: python
|
|
694
|
-
|
|
695
|
-
dataset.items.open_in_web(item_id='item_id')
|
|
696
|
-
|
|
697
|
-
"""
|
|
698
|
-
if filepath is not None:
|
|
699
|
-
item = self.get(filepath=filepath)
|
|
700
|
-
if item is not None:
|
|
701
|
-
item.open_in_web()
|
|
702
|
-
elif item_id is not None:
|
|
703
|
-
self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
|
|
704
|
-
else:
|
|
705
|
-
self._client_api._open_in_web(url=self.platform_url)
|
|
706
|
-
|
|
707
|
-
def update_status(self,
|
|
708
|
-
status: entities.ItemStatus,
|
|
709
|
-
items=None,
|
|
710
|
-
item_ids=None,
|
|
711
|
-
filters=None,
|
|
712
|
-
dataset=None,
|
|
713
|
-
clear=False):
|
|
714
|
-
"""
|
|
715
|
-
Update item status in task
|
|
716
|
-
|
|
717
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.
|
|
718
|
-
|
|
719
|
-
You must provide at least ONE of the following params: items, item_ids, filters.
|
|
720
|
-
|
|
721
|
-
:param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
|
|
722
|
-
:param list items: list of items
|
|
723
|
-
:param list item_ids: list of items id
|
|
724
|
-
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
725
|
-
:param dtlpy.entities.dataset.Dataset dataset: dataset object
|
|
726
|
-
:param bool clear: to delete status
|
|
727
|
-
|
|
728
|
-
**Example**:
|
|
729
|
-
|
|
730
|
-
.. code-block:: python
|
|
731
|
-
|
|
732
|
-
dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)
|
|
733
|
-
|
|
734
|
-
"""
|
|
735
|
-
if items is None and item_ids is None and filters is None:
|
|
736
|
-
raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')
|
|
737
|
-
|
|
738
|
-
if self._dataset is None and dataset is None:
|
|
739
|
-
raise exceptions.PlatformException('400', 'Please provide dataset')
|
|
740
|
-
elif dataset is None:
|
|
741
|
-
dataset = self._dataset
|
|
742
|
-
|
|
743
|
-
if filters is not None:
|
|
744
|
-
items = dataset.items.list(filters=filters)
|
|
745
|
-
item_count = items.items_count
|
|
746
|
-
elif items is not None:
|
|
747
|
-
if isinstance(items, entities.PagedEntities):
|
|
748
|
-
item_count = items.items_count
|
|
749
|
-
else:
|
|
750
|
-
if not isinstance(items, list):
|
|
751
|
-
items = [items]
|
|
752
|
-
item_count = len(items)
|
|
753
|
-
items = [items]
|
|
754
|
-
else:
|
|
755
|
-
if not isinstance(item_ids, list):
|
|
756
|
-
item_ids = [item_ids]
|
|
757
|
-
item_count = len(item_ids)
|
|
758
|
-
items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]
|
|
759
|
-
|
|
760
|
-
pool = self._client_api.thread_pools(pool_name='item.status_update')
|
|
761
|
-
jobs = [None for _ in range(item_count)]
|
|
762
|
-
# call multiprocess wrapper to run service on each item in list
|
|
763
|
-
for page in items:
|
|
764
|
-
for i_item, item in enumerate(page):
|
|
765
|
-
jobs[i_item] = pool.submit(item.update_status,
|
|
766
|
-
**{'status': status,
|
|
767
|
-
'clear': clear})
|
|
768
|
-
|
|
769
|
-
# get all results
|
|
770
|
-
results = [j.result() for j in jobs]
|
|
771
|
-
out_success = [r for r in results if r is True]
|
|
772
|
-
out_errors = [r for r in results if r is False]
|
|
773
|
-
if len(out_errors) == 0:
|
|
774
|
-
logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
|
|
775
|
-
else:
|
|
776
|
-
logger.error(out_errors)
|
|
777
|
-
logger.error('Item/s updated with {} errors'.format(len(out_errors)))
|
|
778
|
-
|
|
779
|
-
def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
|
|
780
|
-
"""
|
|
781
|
-
Create a directory in a dataset.
|
|
782
|
-
|
|
783
|
-
**Prerequisites**: All users.
|
|
784
|
-
|
|
785
|
-
:param str directory: name of directory
|
|
786
|
-
:param dtlpy.entities.dataset.Dataset dataset: dataset object
|
|
787
|
-
:return: Item object
|
|
788
|
-
:rtype: dtlpy.entities.item.Item
|
|
789
|
-
|
|
790
|
-
**Example**:
|
|
791
|
-
|
|
792
|
-
.. code-block:: python
|
|
793
|
-
|
|
794
|
-
dataset.items.make_dir(directory='directory_name')
|
|
795
|
-
"""
|
|
796
|
-
if self._dataset_id is None and dataset is None:
|
|
797
|
-
raise exceptions.PlatformException('400', 'Please provide parameter dataset')
|
|
798
|
-
|
|
799
|
-
payload = {
|
|
800
|
-
'type': 'dir',
|
|
801
|
-
'path': directory
|
|
802
|
-
}
|
|
803
|
-
headers = {'content-type': 'application/x-www-form-urlencoded'}
|
|
804
|
-
success, response = self._client_api.gen_request(req_type="post",
|
|
805
|
-
headers=headers,
|
|
806
|
-
path="/datasets/{}/items".format(self._dataset_id),
|
|
807
|
-
data=payload)
|
|
808
|
-
if success:
|
|
809
|
-
item = self.items_entity.from_json(client_api=self._client_api,
|
|
810
|
-
_json=response.json(),
|
|
811
|
-
dataset=self._dataset)
|
|
812
|
-
else:
|
|
813
|
-
raise exceptions.PlatformException(response)
|
|
814
|
-
|
|
815
|
-
return item
|
|
816
|
-
|
|
817
|
-
def move_items(self,
|
|
818
|
-
destination: str,
|
|
819
|
-
filters: entities.Filters = None,
|
|
820
|
-
items=None,
|
|
821
|
-
dataset: entities.Dataset = None
|
|
822
|
-
) -> bool:
|
|
823
|
-
"""
|
|
824
|
-
Move items to another directory.
|
|
825
|
-
If directory does not exist we will create it
|
|
826
|
-
|
|
827
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
828
|
-
|
|
829
|
-
:param str destination: destination directory
|
|
830
|
-
:param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
|
|
831
|
-
:param items: optional - either this or filters. A list of items to move
|
|
832
|
-
:param dtlpy.entities.dataset.Dataset dataset: dataset object
|
|
833
|
-
:return: True if success
|
|
834
|
-
:rtype: bool
|
|
835
|
-
|
|
836
|
-
**Example**:
|
|
837
|
-
|
|
838
|
-
.. code-block:: python
|
|
839
|
-
|
|
840
|
-
dataset.items.move_items(destination='directory_name')
|
|
841
|
-
"""
|
|
842
|
-
if filters is None and items is None:
|
|
843
|
-
raise exceptions.PlatformException('400', 'Must provide either filters or items')
|
|
844
|
-
|
|
845
|
-
dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
|
|
846
|
-
dest_dir_filter.recursive = False
|
|
847
|
-
dest_dir_filter.add(field='filename', values=destination)
|
|
848
|
-
dirs_page = self.list(filters=dest_dir_filter)
|
|
849
|
-
|
|
850
|
-
if dirs_page.items_count == 0:
|
|
851
|
-
directory = self.make_dir(directory=destination, dataset=dataset)
|
|
852
|
-
elif dirs_page.items_count == 1:
|
|
853
|
-
directory = dirs_page.items[0]
|
|
854
|
-
else:
|
|
855
|
-
raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))
|
|
856
|
-
|
|
857
|
-
if filters is not None:
|
|
858
|
-
items = self.list(filters=filters)
|
|
859
|
-
elif isinstance(items, list):
|
|
860
|
-
items = [items]
|
|
861
|
-
elif not isinstance(items, entities.PagedEntities):
|
|
862
|
-
raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
|
|
863
|
-
type(items)))
|
|
864
|
-
|
|
865
|
-
item_ids = list()
|
|
866
|
-
for page in items:
|
|
867
|
-
for item in page:
|
|
868
|
-
item_ids.append(item.id)
|
|
869
|
-
|
|
870
|
-
success, response = self._client_api.gen_request(req_type="put",
|
|
871
|
-
path="/datasets/{}/items/{}".format(self._dataset_id,
|
|
872
|
-
directory.id),
|
|
873
|
-
json_req=item_ids)
|
|
874
|
-
if not success:
|
|
875
|
-
raise exceptions.PlatformException(response)
|
|
876
|
-
|
|
877
|
-
return success
|
|
878
|
-
|
|
879
|
-
def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
|
|
880
|
-
"""
|
|
881
|
-
Get item score
|
|
882
|
-
|
|
883
|
-
**Prerequisites**: You must be able to read the task
|
|
884
|
-
|
|
885
|
-
:param str item_id: item id
|
|
886
|
-
:param str task_id: task id
|
|
887
|
-
:param int page_offset: start page
|
|
888
|
-
:param int page_size: page size
|
|
889
|
-
:return: page of item scores
|
|
890
|
-
|
|
891
|
-
**Example**:
|
|
892
|
-
|
|
893
|
-
.. code-block:: python
|
|
894
|
-
|
|
895
|
-
dataset.items.item_score(item_id='item_id', task_id='task_id')
|
|
896
|
-
|
|
897
|
-
"""
|
|
898
|
-
|
|
899
|
-
if item_id is None:
|
|
900
|
-
raise exceptions.PlatformException('400', 'Must provide item id')
|
|
901
|
-
|
|
902
|
-
if task_id is None:
|
|
903
|
-
raise exceptions.PlatformException('400', 'Must provide task id')
|
|
904
|
-
|
|
905
|
-
success, response = self._client_api.gen_request(req_type="get",
|
|
906
|
-
path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
|
|
907
|
-
.format(task_id, item_id, page_offset, page_size))
|
|
908
|
-
if success:
|
|
909
|
-
return response.json()
|
|
910
|
-
else:
|
|
911
|
-
raise exceptions.PlatformException(response)
|
|
912
|
-
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
from .. import entities, exceptions, repositories, miscellaneous, _api_reference
|
|
4
|
+
from ..services.api_client import ApiClient
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(name='dtlpy')
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Items:
|
|
10
|
+
"""
|
|
11
|
+
Items Repository
|
|
12
|
+
|
|
13
|
+
The Items class allows you to manage items in your datasets.
|
|
14
|
+
For information on actions related to items see https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_items/chapter/
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
def __init__(self,
|
|
18
|
+
client_api: ApiClient,
|
|
19
|
+
datasets: repositories.Datasets = None,
|
|
20
|
+
dataset: entities.Dataset = None,
|
|
21
|
+
dataset_id=None,
|
|
22
|
+
items_entity=None,
|
|
23
|
+
project=None):
|
|
24
|
+
self._client_api = client_api
|
|
25
|
+
self._dataset = dataset
|
|
26
|
+
self._dataset_id = dataset_id
|
|
27
|
+
self._datasets = datasets
|
|
28
|
+
self._project = project
|
|
29
|
+
# set items entity to represent the item (Item, Codebase, Artifact etc...)
|
|
30
|
+
if items_entity is None:
|
|
31
|
+
self.items_entity = entities.Item
|
|
32
|
+
if self._dataset_id is None and self._dataset is not None:
|
|
33
|
+
self._dataset_id = self._dataset.id
|
|
34
|
+
|
|
35
|
+
############
|
|
36
|
+
# entities #
|
|
37
|
+
############
|
|
38
|
+
@property
|
|
39
|
+
def dataset(self) -> entities.Dataset:
|
|
40
|
+
if self._dataset is None:
|
|
41
|
+
if self._dataset_id is None:
|
|
42
|
+
raise exceptions.PlatformException(
|
|
43
|
+
error='400',
|
|
44
|
+
message='Cannot perform action WITHOUT Dataset entity in Items repository. Please set a dataset')
|
|
45
|
+
self._dataset = self.datasets.get(dataset_id=self._dataset_id, fetch=None)
|
|
46
|
+
assert isinstance(self._dataset, entities.Dataset)
|
|
47
|
+
return self._dataset
|
|
48
|
+
|
|
49
|
+
@dataset.setter
|
|
50
|
+
def dataset(self, dataset: entities.Dataset):
|
|
51
|
+
if not isinstance(dataset, entities.Dataset):
|
|
52
|
+
raise ValueError('Must input a valid Dataset entity')
|
|
53
|
+
self._dataset = dataset
|
|
54
|
+
|
|
55
|
+
@property
|
|
56
|
+
def project(self) -> entities.Project:
|
|
57
|
+
if self._project is None:
|
|
58
|
+
raise exceptions.PlatformException(
|
|
59
|
+
error='400',
|
|
60
|
+
message='Cannot perform action WITHOUT Project entity in Items repository. Please set a project')
|
|
61
|
+
assert isinstance(self._dataset, entities.Dataset)
|
|
62
|
+
return self._project
|
|
63
|
+
|
|
64
|
+
@project.setter
|
|
65
|
+
def project(self, project: entities.Project):
|
|
66
|
+
if not isinstance(project, entities.Project):
|
|
67
|
+
raise ValueError('Must input a valid Dataset entity')
|
|
68
|
+
self._project = project
|
|
69
|
+
|
|
70
|
+
################
|
|
71
|
+
# repositories #
|
|
72
|
+
################
|
|
73
|
+
@property
|
|
74
|
+
def datasets(self) -> repositories.Datasets:
|
|
75
|
+
if self._datasets is None:
|
|
76
|
+
self._datasets = repositories.Datasets(client_api=self._client_api)
|
|
77
|
+
assert isinstance(self._datasets, repositories.Datasets)
|
|
78
|
+
return self._datasets
|
|
79
|
+
|
|
80
|
+
###########
|
|
81
|
+
# methods #
|
|
82
|
+
###########
|
|
83
|
+
|
|
84
|
+
def set_items_entity(self, entity):
|
|
85
|
+
"""
|
|
86
|
+
Set the item entity type to `Artifact <https://dataloop.ai/docs/auto-annotation-service?#uploading-model-weights-as-artifacts>`_, Item, or Codebase.
|
|
87
|
+
|
|
88
|
+
:param entities.Item, entities.Artifact, entities.Codebase entity: entity type [entities.Item, entities.Artifact, entities.Codebase]
|
|
89
|
+
"""
|
|
90
|
+
if entity in [entities.Item, entities.Artifact, entities.Codebase]:
|
|
91
|
+
self.items_entity = entity
|
|
92
|
+
else:
|
|
93
|
+
raise exceptions.PlatformException(error="403",
|
|
94
|
+
message="Unable to set given entity. Entity give: {}".format(entity))
|
|
95
|
+
|
|
96
|
+
def get_all_items(self, filters: entities.Filters = None) -> [entities.Item]:
|
|
97
|
+
"""
|
|
98
|
+
Get all items in dataset.
|
|
99
|
+
|
|
100
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
101
|
+
|
|
102
|
+
:param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters items
|
|
103
|
+
:return: list of all items
|
|
104
|
+
:rtype: list
|
|
105
|
+
|
|
106
|
+
**Example**:
|
|
107
|
+
|
|
108
|
+
.. code-block:: python
|
|
109
|
+
|
|
110
|
+
dataset.items.get_all_items()
|
|
111
|
+
|
|
112
|
+
"""
|
|
113
|
+
if filters is None:
|
|
114
|
+
filters = entities.Filters()
|
|
115
|
+
filters._user_query = 'false'
|
|
116
|
+
filters.add(field='type', values='file')
|
|
117
|
+
pages = self.list(filters=filters)
|
|
118
|
+
num_items = pages.items_count
|
|
119
|
+
items = [None for _ in range(num_items)]
|
|
120
|
+
for i_item, item in enumerate(pages.all()):
|
|
121
|
+
items[i_item] = item
|
|
122
|
+
items = [item for item in items if item is not None]
|
|
123
|
+
return items
|
|
124
|
+
|
|
125
|
+
def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Item]:
|
|
126
|
+
pool = self._client_api.thread_pools(pool_name='entity.create')
|
|
127
|
+
jobs = [None for _ in range(len(response_items))]
|
|
128
|
+
# return triggers list
|
|
129
|
+
for i_item, item in enumerate(response_items):
|
|
130
|
+
jobs[i_item] = pool.submit(self.items_entity._protected_from_json,
|
|
131
|
+
**{'client_api': self._client_api,
|
|
132
|
+
'_json': item,
|
|
133
|
+
'dataset': self.dataset})
|
|
134
|
+
# get all results
|
|
135
|
+
results = [j.result() for j in jobs]
|
|
136
|
+
# log errors
|
|
137
|
+
_ = [logger.warning(r[1]) for r in results if r[0] is False]
|
|
138
|
+
# return good jobs
|
|
139
|
+
items = miscellaneous.List([r[1] for r in results if r[0] is True])
|
|
140
|
+
return items
|
|
141
|
+
|
|
142
|
+
def _list(self, filters: entities.Filters):
|
|
143
|
+
"""
|
|
144
|
+
Get dataset items list This is a browsing endpoint, for any given path item count will be returned,
|
|
145
|
+
user is expected to perform another request then for every folder item to actually get the its item list.
|
|
146
|
+
|
|
147
|
+
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
148
|
+
:return: json response
|
|
149
|
+
"""
|
|
150
|
+
# prepare request
|
|
151
|
+
success, response = self._client_api.gen_request(req_type="POST",
|
|
152
|
+
path="/datasets/{}/query".format(self.dataset.id),
|
|
153
|
+
json_req=filters.prepare(),
|
|
154
|
+
headers={'user_query': filters._user_query})
|
|
155
|
+
if not success:
|
|
156
|
+
raise exceptions.PlatformException(response)
|
|
157
|
+
return response.json()
|
|
158
|
+
|
|
159
|
+
@_api_reference.add(path='/datasets/{id}/query', method='post')
|
|
160
|
+
def list(self,
|
|
161
|
+
filters: entities.Filters = None,
|
|
162
|
+
page_offset: int = None,
|
|
163
|
+
page_size: int = None
|
|
164
|
+
) -> entities.PagedEntities:
|
|
165
|
+
"""
|
|
166
|
+
List items in a dataset.
|
|
167
|
+
|
|
168
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
169
|
+
|
|
170
|
+
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
171
|
+
:param int page_offset: start page
|
|
172
|
+
:param int page_size: page size
|
|
173
|
+
:return: Pages object
|
|
174
|
+
:rtype: dtlpy.entities.paged_entities.PagedEntities
|
|
175
|
+
|
|
176
|
+
**Example**:
|
|
177
|
+
|
|
178
|
+
.. code-block:: python
|
|
179
|
+
|
|
180
|
+
dataset.items.list(page_offset=0, page_size=100)
|
|
181
|
+
"""
|
|
182
|
+
# default filters
|
|
183
|
+
if filters is None:
|
|
184
|
+
filters = entities.Filters()
|
|
185
|
+
filters._user_query = 'false'
|
|
186
|
+
# assert type filters
|
|
187
|
+
elif not isinstance(filters, entities.Filters):
|
|
188
|
+
raise exceptions.PlatformException(error='400',
|
|
189
|
+
message='Unknown filters type: {!r}'.format(type(filters)))
|
|
190
|
+
if filters.resource != entities.FiltersResource.ITEM and filters.resource != entities.FiltersResource.ANNOTATION:
|
|
191
|
+
raise exceptions.PlatformException(
|
|
192
|
+
error='400',
|
|
193
|
+
message='Filters resource must to be FiltersResource.ITEM. Got: {!r}'.format(filters.resource))
|
|
194
|
+
|
|
195
|
+
# page size
|
|
196
|
+
if page_size is not None:
|
|
197
|
+
filters.page_size = page_size
|
|
198
|
+
|
|
199
|
+
# page offset
|
|
200
|
+
if page_offset is not None:
|
|
201
|
+
filters.page = page_offset
|
|
202
|
+
|
|
203
|
+
if filters.resource == entities.FiltersResource.ITEM:
|
|
204
|
+
items_repository = self
|
|
205
|
+
else:
|
|
206
|
+
items_repository = repositories.Annotations(client_api=self._client_api,
|
|
207
|
+
dataset=self._dataset)
|
|
208
|
+
|
|
209
|
+
paged = entities.PagedEntities(items_repository=items_repository,
|
|
210
|
+
filters=filters,
|
|
211
|
+
page_offset=filters.page,
|
|
212
|
+
page_size=filters.page_size,
|
|
213
|
+
client_api=self._client_api)
|
|
214
|
+
paged.get_page()
|
|
215
|
+
return paged
|
|
216
|
+
|
|
217
|
+
@_api_reference.add(path='/items/{id}', method='get')
|
|
218
|
+
def get(self,
|
|
219
|
+
filepath: str = None,
|
|
220
|
+
item_id: str = None,
|
|
221
|
+
fetch: bool = None,
|
|
222
|
+
is_dir: bool = False
|
|
223
|
+
) -> entities.Item:
|
|
224
|
+
"""
|
|
225
|
+
Get Item object
|
|
226
|
+
|
|
227
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*.
|
|
228
|
+
|
|
229
|
+
:param str filepath: optional - search by remote path
|
|
230
|
+
:param str item_id: optional - search by id
|
|
231
|
+
:param bool fetch: optional - fetch entity from platform, default taken from cookie
|
|
232
|
+
:param bool is_dir: True if you want to get an item from dir type
|
|
233
|
+
:return: Item object
|
|
234
|
+
:rtype: dtlpy.entities.item.Item
|
|
235
|
+
|
|
236
|
+
**Example**:
|
|
237
|
+
|
|
238
|
+
.. code-block:: python
|
|
239
|
+
|
|
240
|
+
dataset.items.get(item_id='item_id')
|
|
241
|
+
"""
|
|
242
|
+
if fetch is None:
|
|
243
|
+
fetch = self._client_api.fetch_entities
|
|
244
|
+
|
|
245
|
+
if fetch:
|
|
246
|
+
if item_id is not None:
|
|
247
|
+
success, response = self._client_api.gen_request(req_type="get",
|
|
248
|
+
path="/items/{}".format(item_id))
|
|
249
|
+
if success:
|
|
250
|
+
item = self.items_entity.from_json(client_api=self._client_api,
|
|
251
|
+
_json=response.json(),
|
|
252
|
+
dataset=self._dataset,
|
|
253
|
+
project=self._project)
|
|
254
|
+
# verify input filepath is same as the given id
|
|
255
|
+
if filepath is not None and item.filename != filepath:
|
|
256
|
+
logger.warning(
|
|
257
|
+
"Mismatch found in items.get: filepath is different then item.filename: "
|
|
258
|
+
"{!r} != {!r}".format(
|
|
259
|
+
filepath,
|
|
260
|
+
item.filename))
|
|
261
|
+
else:
|
|
262
|
+
raise exceptions.PlatformException(response)
|
|
263
|
+
elif filepath is not None:
|
|
264
|
+
filters = entities.Filters()
|
|
265
|
+
filters.pop(field='hidden')
|
|
266
|
+
if is_dir:
|
|
267
|
+
filters.add(field='type', values='dir')
|
|
268
|
+
filters.recursive = False
|
|
269
|
+
filters.add(field='filename', values=filepath)
|
|
270
|
+
paged_entity = self.list(filters=filters)
|
|
271
|
+
if len(paged_entity.items) == 0:
|
|
272
|
+
raise exceptions.PlatformException(error='404',
|
|
273
|
+
message='Item not found. filepath= "{}"'.format(filepath))
|
|
274
|
+
elif len(paged_entity.items) > 1:
|
|
275
|
+
raise exceptions.PlatformException(
|
|
276
|
+
error='404',
|
|
277
|
+
message='More than one item found. Please "get" by id. filepath: "{}"'.format(filepath))
|
|
278
|
+
else:
|
|
279
|
+
item = paged_entity.items[0]
|
|
280
|
+
else:
|
|
281
|
+
raise exceptions.PlatformException(error="400",
|
|
282
|
+
message='Must choose by at least one. "filename" or "item_id"')
|
|
283
|
+
else:
|
|
284
|
+
item = entities.Item.from_json(_json={'id': item_id,
|
|
285
|
+
'filename': filepath},
|
|
286
|
+
client_api=self._client_api,
|
|
287
|
+
dataset=self._dataset,
|
|
288
|
+
is_fetched=False,
|
|
289
|
+
project=self._project)
|
|
290
|
+
assert isinstance(item, entities.Item)
|
|
291
|
+
return item
|
|
292
|
+
|
|
293
|
+
@_api_reference.add(path='/items/{id}/clone', method='post')
def clone(self,
          item_id: str,
          dst_dataset_id: str,
          remote_filepath: str = None,
          metadata: dict = None,
          with_annotations: bool = True,
          with_metadata: bool = True,
          with_task_annotations_status: bool = False,
          allow_many: bool = False,
          wait: bool = True):
    """
    Clone an item into another dataset. Read more about cloning datasets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.

    **Prerequisites**: You must be in the role of an *owner* or *developer*.

    :param str item_id: item to clone
    :param str dst_dataset_id: destination dataset id
    :param str remote_filepath: complete filepath
    :param dict metadata: new metadata to add
    :param bool with_annotations: clone annotations
    :param bool with_metadata: clone metadata
    :param bool with_task_annotations_status: clone task annotations status
    :param bool allow_many: `bool` if True, using multiple clones in single dataset is allowed, (default=False)
    :param bool wait: wait for the command to finish
    :return: Item object
    :rtype: dtlpy.entities.item.Item

    **Example**:

    .. code-block:: python

        dataset.items.clone(item_id='item_id',
                dst_dataset_id='dist_dataset_id',
                with_metadata=True,
                with_task_annotations_status=False,
                with_annotations=False)
    """
    # build the clone request body; an empty metadata dict is sent when none given
    request_body = {
        "targetDatasetId": dst_dataset_id,
        "remoteFileName": remote_filepath,
        "metadata": metadata if metadata is not None else dict(),
        "cloneDatasetParams": {
            "withItemsAnnotations": with_annotations,
            "withMetadata": with_metadata,
            "withTaskAnnotationsStatus": with_task_annotations_status,
        },
        "allowMany": allow_many,
    }
    success, response = self._client_api.gen_request(req_type="post",
                                                     path="/items/{}/clone".format(item_id),
                                                     json_req=request_body)
    if not success:
        raise exceptions.PlatformException(response)

    # the platform performs the clone asynchronously and answers with a command
    command = entities.Command.from_json(_json=response.json(),
                                         client_api=self._client_api)
    if not wait:
        # caller asked not to block; hand back the pending command
        return command
    command = command.wait()

    if 'returnedModelId' not in command.spec:
        raise exceptions.PlatformException(error='400',
                                           message="returnedModelId key is missing in command response: {}"
                                           .format(response))
    # fetch and return the freshly cloned item
    return self.get(item_id=command.spec['returnedModelId'][0])
|
|
361
|
+
|
|
362
|
+
@_api_reference.add(path='/items/{id}', method='delete')
def delete(self,
           filename: str = None,
           item_id: str = None,
           filters: entities.Filters = None):
    """
    Delete item from platform.

    **Prerequisites**: You must be in the role of an *owner* or *developer*.

    You must provide at least ONE of the following params: item id, filename, filters.

    :param str filename: optional - search item by remote path
    :param str item_id: optional - search item by id
    :param dtlpy.entities.filters.Filters filters: optional - delete items by filter
    :return: True if success
    :rtype: bool

    **Example**:

    .. code-block:: python

        dataset.items.delete(item_id='item_id')
    """
    # resolution priority: explicit id > filename lookup > bulk delete by filters
    if item_id is not None:
        success, response = self._client_api.gen_request(req_type="delete",
                                                         path="/items/{}".format(item_id),
                                                         )
    elif filename is not None:
        # remote paths are always rooted; normalize before lookup
        if not filename.startswith("/"):
            filename = "/" + filename
        items = self.get(filepath=filename)
        if not isinstance(items, list):
            items = [items]
        if len(items) == 0:
            raise exceptions.PlatformException("404", "Item not found")
        elif len(items) > 1:
            # ambiguous filename; caller must delete by id instead
            raise exceptions.PlatformException(error="404", message="More than one item exists by the name provided")
        else:
            item_id = items[0].id
            success, response = self._client_api.gen_request(req_type="delete",
                                                             path="/items/{}".format(item_id))
    elif filters is not None:
        # bulk delete: send the filter query with a 'delete' operation
        success, response = self._client_api.gen_request(req_type="POST",
                                                         path="/datasets/{}/query".format(self.dataset.id),
                                                         json_req=filters.prepare(operation='delete'))
    else:
        raise exceptions.PlatformException("400", "Must provide item id, filename or filters")

    # check response
    if success:
        logger.debug("Item/s deleted successfully")
        return success
    else:
        raise exceptions.PlatformException(response)
|
|
418
|
+
|
|
419
|
+
@_api_reference.add(path='/items/{id}', method='patch')
def update(self,
           item: entities.Item = None,
           filters: entities.Filters = None,
           update_values=None,
           system_update_values=None,
           system_metadata: bool = False):
    """
    Update item metadata.

    **Prerequisites**: You must be in the role of an *owner* or *developer*.

    You must provide at least ONE of the following params: update_values, system_update_values.

    :param dtlpy.entities.item.Item item: Item object
    :param dtlpy.entities.filters.Filters filters: optional update filtered items by given filter
    :param update_values: optional field to be updated and new values
    :param system_update_values: values in system metadata to be updated
    :param bool system_metadata: True, if you want to update the metadata system
    :return: Item object
    :rtype: dtlpy.entities.item.Item

    **Example**:

    .. code-block:: python

        dataset.items.update(item='item_entity')
    """
    # filters coming from a task/assignment carry implicit update semantics,
    # so explicit update values are not required in that case
    ref = filters is not None and (filters._ref_task or filters._ref_assignment)

    if system_update_values and not system_metadata:
        logger.warning('system metadata will not be updated because param system_metadata is False')

    # check params: exactly one update mode must be selected (item OR filters)
    if item is None and filters is None:
        raise exceptions.PlatformException('400', 'must provide either item or filters')

    value_to_update = update_values or system_update_values

    # bulk update by filters requires values to apply (unless it is a ref filter)
    if item is None and not ref and not value_to_update:
        raise exceptions.PlatformException('400',
                                           'Must provide update_values or system_update_values')

    # update values are filter-only; a single item update uses the item's own diff
    if item is not None and value_to_update:
        raise exceptions.PlatformException('400',
                                           'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
                                           'These parameters are intended only for bulk updates using filters.')

    # update a single item
    if item is not None:
        # PATCH only the fields that actually changed since the item was fetched
        json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
                                                 modified=item.to_json())
        if not json_req:
            # nothing changed locally - skip the request entirely
            return item
        url_path = "/items/{}".format(item.id)
        if system_metadata:
            url_path += "?system=true"
        success, response = self._client_api.gen_request(req_type="patch",
                                                         path=url_path,
                                                         json_req=json_req)
        if success:
            logger.debug("Item was updated successfully. Item id: {}".format(item.id))
            return self.items_entity.from_json(client_api=self._client_api,
                                               _json=response.json(),
                                               dataset=self._dataset)
        else:
            logger.error("Error while updating item")
            raise exceptions.PlatformException(response)
    # update by filters (bulk)
    else:
        # prepare the query request with the values to apply server-side
        prepared_filter = filters.prepare(operation='update',
                                          system_update=system_update_values,
                                          system_metadata=system_metadata,
                                          update=update_values)
        success, response = self._client_api.gen_request(req_type="POST",
                                                         path="/datasets/{}/query".format(self.dataset.id),
                                                         json_req=prepared_filter)
        if not success:
            raise exceptions.PlatformException(response)
        else:
            logger.debug("Items were updated successfully.")
            # bulk update returns the raw platform response, not Item entities
            return response.json()
|
|
502
|
+
|
|
503
|
+
def download(
        self,
        filters: entities.Filters = None,
        items=None,
        # download options
        local_path: str = None,
        file_types: list = None,
        save_locally: bool = True,
        to_array: bool = False,
        annotation_options: entities.ViewAnnotationOptions = None,
        annotation_filters: entities.Filters = None,
        overwrite: bool = False,
        to_items_folder: bool = True,
        thickness: int = 1,
        with_text: bool = False,
        without_relative_path=None,
        avoid_unnecessary_annotation_download: bool = False,
        include_annotations_in_output: bool = True,
        export_png_files: bool = False,
        filter_output_annotations: bool = False,
        alpha: float = 1,
        export_version=entities.ExportVersion.V1,
        dataset_lock: bool = False,
        lock_timeout_sec: int = None,
        export_summary: bool = False,
        raise_on_error: bool = False,
):
    """
    Download dataset items by filters.

    Filters the dataset for items and saves them locally.

    Optional -- download annotation, mask, instance, and image mask of the item.

    **Prerequisites**: You must be in the role of an *owner* or *developer*.

    :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
    :param List[dtlpy.entities.item.Item] or dtlpy.entities.item.Item items: download Item entity or item_id (or a list of item)
    :param str local_path: local folder or filename to save to.
    :param list file_types: a list of file type to download. e.g ['video/webm', 'video/mp4', 'image/jpeg', 'image/png']
    :param bool save_locally: bool. save to disk or return a buffer
    :param bool to_array: returns Ndarray when True and local_path = False
    :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
    :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
    :param bool overwrite: optional - default = False
    :param bool dataset_lock: optional - default = False
    :param bool export_summary: optional - default = False
    :param int lock_timeout_sec: optional
    :param bool to_items_folder: Create 'items' folder and download items to it
    :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
    :param bool with_text: optional - add text to annotations, default = False
    :param bool without_relative_path: bool - download items without the relative path from platform
    :param bool avoid_unnecessary_annotation_download: default - False
    :param bool include_annotations_in_output: default - True , if export should contain annotations
    :param bool export_png_files: default - if True, semantic annotations should be exported as png files
    :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
    :param float alpha: opacity value [0 1], default 1
    :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
    :param bool raise_on_error: raise an exception if an error occurs
    :return: generator of local_path per each downloaded item
    :rtype: generator or single item

    **Example**:

    .. code-block:: python

        dataset.items.download(local_path='local_path',
                             annotation_options=dl.ViewAnnotationOptions,
                             overwrite=False,
                             thickness=1,
                             with_text=False,
                             alpha=1,
                             save_locally=True
                             )
    """
    # thin wrapper: all download logic lives in the Downloader repository,
    # which is constructed around this items repository instance
    downloader = repositories.Downloader(self)
    return downloader.download(
        filters=filters,
        items=items,
        local_path=local_path,
        file_types=file_types,
        save_locally=save_locally,
        to_array=to_array,
        annotation_options=annotation_options,
        annotation_filters=annotation_filters,
        overwrite=overwrite,
        to_items_folder=to_items_folder,
        thickness=thickness,
        alpha=alpha,
        with_text=with_text,
        without_relative_path=without_relative_path,
        avoid_unnecessary_annotation_download=avoid_unnecessary_annotation_download,
        include_annotations_in_output=include_annotations_in_output,
        export_png_files=export_png_files,
        filter_output_annotations=filter_output_annotations,
        export_version=export_version,
        dataset_lock=dataset_lock,
        lock_timeout_sec=lock_timeout_sec,
        export_summary=export_summary,
        raise_on_error=raise_on_error
    )
|
|
604
|
+
|
|
605
|
+
def upload(
        self,
        # what to upload
        local_path: str,
        local_annotations_path: str = None,
        # upload options
        remote_path: str = "/",
        remote_name: str = None,
        file_types: list = None,
        overwrite: bool = False,
        item_metadata: dict = None,
        output_entity=entities.Item,
        no_output: bool = False,
        export_version: str = entities.ExportVersion.V1,
        item_description: str = None,
        raise_on_error: bool = False,
        return_as_list: bool = False
):
    """
    Upload local file to dataset.
    Local filesystem will remain unchanged.
    If "*" at the end of local_path (e.g. "/images/*") items will be uploaded without the head directory.

    **Prerequisites**: Any user can upload items.

    :param str local_path: list of local file, local folder, BufferIO, numpy.ndarray or url to upload
    :param str local_annotations_path: path to dataloop format annotations json files.
    :param str remote_path: remote path to save.
    :param str remote_name: remote base name to save. when upload numpy.ndarray as local path, remote_name with .jpg or .png ext is mandatory
    :param list file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
    :param dict item_metadata: metadata dict to upload to item or ExportMetadata option to export metadata from annotation file
    :param bool overwrite: optional - default = False
    :param output_entity: output type
    :param bool no_output: do not return the items after upload
    :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
    :param str item_description: add a string description to the uploaded item
    :param bool raise_on_error: raise an exception if an error occurs
    :param bool return_as_list: return a list of items instead of a generator

    :return: Output (generator/single item)
    :rtype: generator or single item

    **Example**:

    .. code-block:: python

        dataset.items.upload(local_path='local_path',
                             local_annotations_path='local_annotations_path',
                             overwrite=True,
                             item_metadata={'Hello': 'World'}
                             )
    """
    # thin wrapper: the Uploader repository handles file discovery, batching
    # and the actual upload requests
    uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
    return uploader.upload(
        local_path=local_path,
        local_annotations_path=local_annotations_path,
        # upload options
        remote_path=remote_path,
        remote_name=remote_name,
        file_types=file_types,
        # config
        overwrite=overwrite,
        # metadata to upload with items
        item_metadata=item_metadata,
        export_version=export_version,
        item_description=item_description,
        raise_on_error=raise_on_error,
        return_as_list=return_as_list
    )
|
|
675
|
+
|
|
676
|
+
@property
def platform_url(self):
    """Web-platform URL of this repository's dataset items browser."""
    resource_path = "projects/{}/datasets/{}/items".format(self.dataset.project.id, self.dataset.id)
    return self._client_api._get_resource_url(resource_path)
|
|
680
|
+
|
|
681
|
+
def open_in_web(self, filepath=None, item_id=None, item=None):
    """
    Open the item in web platform

    **Prerequisites**: You must be in the role of an *owner* or *developer* or be an *annotation manager*/*annotator* with access to that item through task.

    :param str filepath: item file path
    :param str item_id: item id
    :param dtlpy.entities.item.Item item: item entity

    **Example**:

    .. code-block:: python

        dataset.items.open_in_web(item_id='item_id')

    """
    # a filepath takes precedence: resolve it to an item entity first
    if filepath is not None:
        item = self.get(filepath=filepath)
    # delegate to the entity when we have one, otherwise build the URL directly
    if item is not None:
        item.open_in_web()
        return
    if item_id is not None:
        self._client_api._open_in_web(url=self.platform_url + '/' + str(item_id))
        return
    # no item specified: open the dataset's items browser
    self._client_api._open_in_web(url=self.platform_url)
|
|
706
|
+
|
|
707
|
+
def update_status(self,
                  status: entities.ItemStatus,
                  items=None,
                  item_ids=None,
                  filters=None,
                  dataset=None,
                  clear=False):
    """
    Update item status in task

    **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned a task with the item.

    You must provide at least ONE of the following params: items, item_ids, filters.

    :param str status: ItemStatus.COMPLETED, ItemStatus.APPROVED, ItemStatus.DISCARDED
    :param list items: list of items
    :param list item_ids: list of items id
    :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
    :param dtlpy.entities.dataset.Dataset dataset: dataset object
    :param bool clear: to delete status

    **Example**:

    .. code-block:: python

        dataset.items.update_status(item_ids='item_id', status=dl.ItemStatus.COMPLETED)

    """
    if items is None and item_ids is None and filters is None:
        raise exceptions.PlatformException('400', 'Must provide either items, item_ids or filters')

    if self._dataset is None and dataset is None:
        raise exceptions.PlatformException('400', 'Please provide dataset')
    elif dataset is None:
        dataset = self._dataset

    # normalize the input into an iterable of pages (each page is a list of items)
    if filters is not None:
        items = dataset.items.list(filters=filters)
    elif items is not None:
        if not isinstance(items, entities.PagedEntities):
            if not isinstance(items, list):
                items = [items]
            # wrap the plain list so it iterates like a single page
            items = [items]
    else:
        if not isinstance(item_ids, list):
            item_ids = [item_ids]
        # fetch=False builds lightweight item stubs without a platform round-trip
        items = [[dataset.items.get(item_id=item_id, fetch=False) for item_id in item_ids]]

    pool = self._client_api.thread_pools(pool_name='item.status_update')
    # Submit one job per item into a flat list. Appending (instead of indexing
    # by a per-page enumerate counter) fixes a bug where multi-page results
    # overwrote earlier jobs - the page-local index reset to 0 on every page,
    # clobbering previous futures and leaving None entries that crashed on
    # j.result().
    jobs = list()
    for page in items:
        for item in page:
            jobs.append(pool.submit(item.update_status,
                                    **{'status': status,
                                       'clear': clear}))

    # collect all results; each job returns True on success, False on failure
    results = [j.result() for j in jobs]
    out_success = [r for r in results if r is True]
    out_errors = [r for r in results if r is False]
    if len(out_errors) == 0:
        logger.debug('Item/s updated successfully. {}/{}'.format(len(out_success), len(results)))
    else:
        logger.error(out_errors)
        logger.error('Item/s updated with {} errors'.format(len(out_errors)))
|
|
778
|
+
|
|
779
|
+
def make_dir(self, directory, dataset: entities.Dataset = None) -> entities.Item:
    """
    Create a directory in a dataset.

    **Prerequisites**: All users.

    :param str directory: name of directory
    :param dtlpy.entities.dataset.Dataset dataset: dataset object
    :return: Item object
    :rtype: dtlpy.entities.item.Item

    **Example**:

    .. code-block:: python

        dataset.items.make_dir(directory='directory_name')
    """
    if self._dataset_id is None and dataset is None:
        raise exceptions.PlatformException('400', 'Please provide parameter dataset')

    # Fix: previously the 'dataset' argument was validated but never used, so a
    # repository without a bound dataset sent the request to "/datasets/None/items".
    # Fall back to the explicitly provided dataset's id when needed.
    dataset_id = self._dataset_id if self._dataset_id is not None else dataset.id

    payload = {
        'type': 'dir',
        'path': directory
    }
    # the endpoint expects a form-encoded body rather than JSON
    headers = {'content-type': 'application/x-www-form-urlencoded'}
    success, response = self._client_api.gen_request(req_type="post",
                                                     headers=headers,
                                                     path="/datasets/{}/items".format(dataset_id),
                                                     data=payload)
    if success:
        item = self.items_entity.from_json(client_api=self._client_api,
                                           _json=response.json(),
                                           dataset=self._dataset)
    else:
        raise exceptions.PlatformException(response)

    return item
|
|
816
|
+
|
|
817
|
+
def move_items(self,
               destination: str,
               filters: entities.Filters = None,
               items=None,
               dataset: entities.Dataset = None
               ) -> bool:
    """
    Move items to another directory.
    If directory does not exist we will create it

    **Prerequisites**: You must be in the role of an *owner* or *developer*.

    :param str destination: destination directory
    :param dtlpy.entities.filters.Filters filters: optional - either this or items. Query of items to move
    :param items: optional - either this or filters. A list of items to move
    :param dtlpy.entities.dataset.Dataset dataset: dataset object
    :return: True if success
    :rtype: bool

    **Example**:

    .. code-block:: python

        dataset.items.move_items(destination='directory_name')
    """
    if filters is None and items is None:
        raise exceptions.PlatformException('400', 'Must provide either filters or items')

    # look up the destination directory item (non-recursive, exact filename match)
    dest_dir_filter = entities.Filters(resource=entities.FiltersResource.ITEM, field='type', values='dir')
    dest_dir_filter.recursive = False
    dest_dir_filter.add(field='filename', values=destination)
    dirs_page = self.list(filters=dest_dir_filter)

    if dirs_page.items_count == 0:
        # destination does not exist yet - create it
        directory = self.make_dir(directory=destination, dataset=dataset)
    elif dirs_page.items_count == 1:
        directory = dirs_page.items[0]
    else:
        raise exceptions.PlatformException('404', 'More than one directory by the name of: {}'.format(destination))

    # normalize the source into an iterable of pages (lists) of items
    if filters is not None:
        items = self.list(filters=filters)
    elif isinstance(items, list):
        # wrap a plain list so it iterates like a single page
        items = [items]
    elif not isinstance(items, entities.PagedEntities):
        raise exceptions.PlatformException('400', 'items must be a list of items or a pages entity not {}'.format(
            type(items)))

    # collect the ids of every item to move
    item_ids = list()
    for page in items:
        for item in page:
            item_ids.append(item.id)

    # single bulk request: PUT the id list onto the destination directory
    success, response = self._client_api.gen_request(req_type="put",
                                                     path="/datasets/{}/items/{}".format(self._dataset_id,
                                                                                         directory.id),
                                                     json_req=item_ids)
    if not success:
        raise exceptions.PlatformException(response)

    return success
|
|
878
|
+
|
|
879
|
+
def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
    """
    Get item scores in a task, paginated.

    **Prerequisites**: You must be able to read the task

    :param str item_id: item id
    :param str task_id: task id
    :param int page_offset: start page
    :param int page_size: page size
    :return: page of item scores (raw platform JSON response)

    **Example**:

    .. code-block:: python

        dataset.items.task_scores(item_id='item_id', task_id='task_id')

    """
    # explicit validation: both ids are required even though the signature has no defaults
    if item_id is None:
        raise exceptions.PlatformException('400', 'Must provide item id')

    if task_id is None:
        raise exceptions.PlatformException('400', 'Must provide task id')

    success, response = self._client_api.gen_request(req_type="get",
                                                     path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
                                                     .format(task_id, item_id, page_offset, page_size))
    if success:
        return response.json()
    else:
        raise exceptions.PlatformException(response)
|
|
912
|
+
|