dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +152 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +975 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +974 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1287 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1585 -1504
- dtlpy/repositories/downloader.py +1157 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +256 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +429 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1786 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
- dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
- dtlpy-1.117.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
dtlpy/repositories/tasks.py
CHANGED
|
@@ -1,1477 +1,1477 @@
|
|
|
1
|
-
import datetime
|
|
2
|
-
import logging
|
|
3
|
-
import json
|
|
4
|
-
from typing import Union, List
|
|
5
|
-
import warnings
|
|
6
|
-
|
|
7
|
-
from .. import exceptions, miscellaneous, entities, repositories, _api_reference
|
|
8
|
-
from ..services.api_client import ApiClient
|
|
9
|
-
|
|
10
|
-
logger = logging.getLogger(name="dtlpy")
|
|
11
|
-
URL_PATH = "/annotationtasks"
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
class Tasks:
|
|
15
|
-
"""
|
|
16
|
-
Tasks Repository
|
|
17
|
-
|
|
18
|
-
The Tasks class allows the user to manage tasks and their properties.
|
|
19
|
-
For more information, read in our developers' documentation about `Creating Tasks <https://developers.dataloop.ai/tutorials/task_workflows/create_a_task/chapter/>`_, and `Redistributing and Reassigning Tasks <https://developers.dataloop.ai/tutorials/task_workflows/redistributing_and_reassigning_a_task/chapter/>`_.
|
|
20
|
-
"""
|
|
21
|
-
|
|
22
|
-
def __init__(
|
|
23
|
-
self,
|
|
24
|
-
client_api: ApiClient,
|
|
25
|
-
project: entities.Project = None,
|
|
26
|
-
dataset: entities.Dataset = None,
|
|
27
|
-
project_id: str = None,
|
|
28
|
-
):
|
|
29
|
-
self._client_api = client_api
|
|
30
|
-
self._project = project
|
|
31
|
-
self._dataset = dataset
|
|
32
|
-
self._assignments = None
|
|
33
|
-
self._project_id = project_id
|
|
34
|
-
|
|
35
|
-
############
|
|
36
|
-
# entities #
|
|
37
|
-
############
|
|
38
|
-
@property
|
|
39
|
-
def project(self) -> entities.Project:
|
|
40
|
-
if self._project is None and self._project_id is None:
|
|
41
|
-
if self._dataset is None:
|
|
42
|
-
raise exceptions.PlatformException(
|
|
43
|
-
error="2001",
|
|
44
|
-
message='Missing "project". need to set a Project entity or use project.tasks repository',
|
|
45
|
-
)
|
|
46
|
-
else:
|
|
47
|
-
self._project = self._dataset.project
|
|
48
|
-
self._project_id = self._project.id
|
|
49
|
-
if self._project is None and self._project_id is not None:
|
|
50
|
-
self._project = self._client_api.projects.get(project_id=self._project_id)
|
|
51
|
-
return self._project
|
|
52
|
-
|
|
53
|
-
@project.setter
|
|
54
|
-
def project(self, project: entities.Project):
|
|
55
|
-
if not isinstance(project, entities.Project):
|
|
56
|
-
raise ValueError("Must input a valid Project entity")
|
|
57
|
-
self._project = project
|
|
58
|
-
|
|
59
|
-
@property
|
|
60
|
-
def dataset(self) -> entities.Dataset:
|
|
61
|
-
if self._dataset is None:
|
|
62
|
-
raise exceptions.PlatformException(
|
|
63
|
-
error="2001", message='Missing "dataset". need to set a Dataset entity or use dataset.tasks repository'
|
|
64
|
-
)
|
|
65
|
-
assert isinstance(self._dataset, entities.Dataset)
|
|
66
|
-
return self._dataset
|
|
67
|
-
|
|
68
|
-
@dataset.setter
|
|
69
|
-
def dataset(self, dataset: entities.Dataset):
|
|
70
|
-
if not isinstance(dataset, entities.Dataset):
|
|
71
|
-
raise ValueError("Must input a valid Dataset entity")
|
|
72
|
-
self._dataset = dataset
|
|
73
|
-
|
|
74
|
-
@property
|
|
75
|
-
def assignments(self) -> repositories.Assignments:
|
|
76
|
-
if self._assignments is None:
|
|
77
|
-
self._assignments = repositories.Assignments(client_api=self._client_api, project=self._project)
|
|
78
|
-
assert isinstance(self._assignments, repositories.Assignments)
|
|
79
|
-
return self._assignments
|
|
80
|
-
|
|
81
|
-
def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Task]:
|
|
82
|
-
pool = self._client_api.thread_pools(pool_name="entity.create")
|
|
83
|
-
jobs = [None for _ in range(len(response_items))]
|
|
84
|
-
|
|
85
|
-
for i_task, task in enumerate(response_items):
|
|
86
|
-
jobs[i_task] = pool.submit(
|
|
87
|
-
entities.Task._protected_from_json,
|
|
88
|
-
**{"client_api": self._client_api, "_json": task, "project": self._project, "dataset": self._dataset},
|
|
89
|
-
)
|
|
90
|
-
|
|
91
|
-
# get all results
|
|
92
|
-
results = [j.result() for j in jobs]
|
|
93
|
-
# log errors
|
|
94
|
-
_ = [logger.warning(r[1]) for r in results if r[0] is False]
|
|
95
|
-
# return good jobs
|
|
96
|
-
tasks = miscellaneous.List([r[1] for r in results if r[0] is True])
|
|
97
|
-
return tasks
|
|
98
|
-
|
|
99
|
-
def _list(self, filters: entities.Filters):
|
|
100
|
-
url = "{}/query".format(URL_PATH)
|
|
101
|
-
query = filters.prepare()
|
|
102
|
-
query["context"] = dict(projectIds=[self._project_id])
|
|
103
|
-
success, response = self._client_api.gen_request(req_type="post", path=url, json_req=filters.prepare())
|
|
104
|
-
|
|
105
|
-
if not success:
|
|
106
|
-
raise exceptions.PlatformException(response)
|
|
107
|
-
return response.json()
|
|
108
|
-
|
|
109
|
-
def query(self, filters=None, project_ids=None):
|
|
110
|
-
"""
|
|
111
|
-
List all tasks by filter.
|
|
112
|
-
|
|
113
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
|
|
114
|
-
|
|
115
|
-
:param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
|
|
116
|
-
:param list project_ids: list of project ids of the required tasks
|
|
117
|
-
:return: Paged entity - task pages generator
|
|
118
|
-
:rtype: dtlpy.entities.paged_entities.PagedEntities
|
|
119
|
-
|
|
120
|
-
**Example**:
|
|
121
|
-
|
|
122
|
-
.. code-block:: python
|
|
123
|
-
|
|
124
|
-
dataset.tasks.query(project_ids='project_ids')
|
|
125
|
-
"""
|
|
126
|
-
if project_ids is None:
|
|
127
|
-
if self._project_id is not None:
|
|
128
|
-
project_ids = self._project_id
|
|
129
|
-
else:
|
|
130
|
-
project_ids = self.project.id
|
|
131
|
-
|
|
132
|
-
if not isinstance(project_ids, list):
|
|
133
|
-
project_ids = [project_ids]
|
|
134
|
-
|
|
135
|
-
if filters is None:
|
|
136
|
-
filters = entities.Filters(resource=entities.FiltersResource.TASK)
|
|
137
|
-
else:
|
|
138
|
-
if not isinstance(filters, entities.Filters):
|
|
139
|
-
raise exceptions.PlatformException("400", "Unknown filters type")
|
|
140
|
-
if filters.resource != entities.FiltersResource.TASK:
|
|
141
|
-
raise exceptions.PlatformException("400", "Filter resource must be task")
|
|
142
|
-
|
|
143
|
-
if filters.context is None:
|
|
144
|
-
filters.context = {"projectIds": project_ids}
|
|
145
|
-
|
|
146
|
-
if self._project_id is not None:
|
|
147
|
-
filters.add(field="projectId", values=self._project_id)
|
|
148
|
-
|
|
149
|
-
if self._dataset is not None:
|
|
150
|
-
filters.add(field="datasetId", values=self._dataset.id)
|
|
151
|
-
|
|
152
|
-
paged = entities.PagedEntities(
|
|
153
|
-
items_repository=self,
|
|
154
|
-
filters=filters,
|
|
155
|
-
page_offset=filters.page,
|
|
156
|
-
page_size=filters.page_size,
|
|
157
|
-
project_id=self._project_id,
|
|
158
|
-
client_api=self._client_api,
|
|
159
|
-
)
|
|
160
|
-
paged.get_page()
|
|
161
|
-
return paged
|
|
162
|
-
|
|
163
|
-
###########
|
|
164
|
-
# methods #
|
|
165
|
-
###########
|
|
166
|
-
@_api_reference.add(path="/annotationtasks/query", method="post")
|
|
167
|
-
def list(
|
|
168
|
-
self,
|
|
169
|
-
project_ids=None,
|
|
170
|
-
status=None,
|
|
171
|
-
task_name=None,
|
|
172
|
-
pages_size=None,
|
|
173
|
-
page_offset=None,
|
|
174
|
-
recipe=None,
|
|
175
|
-
creator=None,
|
|
176
|
-
assignments=None,
|
|
177
|
-
min_date=None,
|
|
178
|
-
max_date=None,
|
|
179
|
-
filters: entities.Filters = None,
|
|
180
|
-
) -> Union[miscellaneous.List[entities.Task], entities.PagedEntities]:
|
|
181
|
-
"""
|
|
182
|
-
List all tasks.
|
|
183
|
-
|
|
184
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
|
|
185
|
-
|
|
186
|
-
:param project_ids: search tasks by given list of project ids
|
|
187
|
-
:param str status: search tasks by a given task status
|
|
188
|
-
:param str task_name: search tasks by a given task name
|
|
189
|
-
:param int pages_size: pages size of the output generator
|
|
190
|
-
:param int page_offset: page offset of the output generator
|
|
191
|
-
:param dtlpy.entities.recipe.Recipe recipe: Search tasks that use a given recipe. Provide the required recipe object
|
|
192
|
-
:param str creator: search tasks created by a given creator (user email)
|
|
193
|
-
:param dtlpy.entities.assignment.Assignment recipe assignments: assignments object
|
|
194
|
-
:param double min_date: search all tasks created AFTER a given date, use a milliseconds format. For example: 1661780622008
|
|
195
|
-
:param double max_date: search all tasks created BEFORE a given date, use a milliseconds format. For example: 1661780622008
|
|
196
|
-
:param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters tasks using DQL
|
|
197
|
-
:return: List of Task objects
|
|
198
|
-
|
|
199
|
-
**Example**:
|
|
200
|
-
|
|
201
|
-
.. code-block:: python
|
|
202
|
-
|
|
203
|
-
dataset.tasks.list(project_ids='project_ids',pages_size=100, page_offset=0)
|
|
204
|
-
"""
|
|
205
|
-
# url
|
|
206
|
-
url = URL_PATH + "/query"
|
|
207
|
-
|
|
208
|
-
if filters is None:
|
|
209
|
-
filters = entities.Filters(use_defaults=False, resource=entities.FiltersResource.TASK)
|
|
210
|
-
else:
|
|
211
|
-
return self.query(filters=filters, project_ids=project_ids)
|
|
212
|
-
|
|
213
|
-
if self._dataset is not None:
|
|
214
|
-
filters.add(field="datasetId", values=self._dataset.id)
|
|
215
|
-
|
|
216
|
-
if project_ids is not None:
|
|
217
|
-
if not isinstance(project_ids, list):
|
|
218
|
-
project_ids = [project_ids]
|
|
219
|
-
elif self._project_id is not None:
|
|
220
|
-
project_ids = [self._project_id]
|
|
221
|
-
else:
|
|
222
|
-
project_ids = [self.project.id]
|
|
223
|
-
filters.context = {"projectIds": project_ids}
|
|
224
|
-
|
|
225
|
-
if assignments is not None:
|
|
226
|
-
if not isinstance(assignments, list):
|
|
227
|
-
assignments = [assignments]
|
|
228
|
-
assignments = [
|
|
229
|
-
assignments_entity.id if isinstance(assignments_entity, entities.Assignment) else assignments_entity
|
|
230
|
-
for assignments_entity in assignments
|
|
231
|
-
]
|
|
232
|
-
filters.add(field="assignmentIds", values=assignments, operator=entities.FiltersOperations.IN)
|
|
233
|
-
if status is not None:
|
|
234
|
-
filters.add(field="status", values=status)
|
|
235
|
-
if task_name is not None:
|
|
236
|
-
filters.add(field="name", values=task_name)
|
|
237
|
-
if pages_size is not None:
|
|
238
|
-
filters.page_size = pages_size
|
|
239
|
-
if pages_size is None:
|
|
240
|
-
filters.page_size = 500
|
|
241
|
-
if page_offset is not None:
|
|
242
|
-
filters.page = page_offset
|
|
243
|
-
if recipe is not None:
|
|
244
|
-
if not isinstance(recipe, list):
|
|
245
|
-
recipe = [recipe]
|
|
246
|
-
recipe = [
|
|
247
|
-
recipe_entity.id if isinstance(recipe_entity, entities.Recipe) else recipe_entity
|
|
248
|
-
for recipe_entity in recipe
|
|
249
|
-
]
|
|
250
|
-
filters.add(field="recipeId", values=recipe, operator=entities.FiltersOperations.IN)
|
|
251
|
-
if creator is not None:
|
|
252
|
-
filters.add(field="creator", values=creator)
|
|
253
|
-
if min_date is not None:
|
|
254
|
-
filters.add(field="dueDate", values=min_date, operator=entities.FiltersOperations.GREATER_THAN)
|
|
255
|
-
if max_date is not None:
|
|
256
|
-
filters.add(field="dueDate", values=max_date, operator=entities.FiltersOperations.LESS_THAN)
|
|
257
|
-
|
|
258
|
-
success, response = self._client_api.gen_request(req_type="post", path=url, json_req=filters.prepare())
|
|
259
|
-
if success:
|
|
260
|
-
tasks = miscellaneous.List(
|
|
261
|
-
[
|
|
262
|
-
entities.Task.from_json(
|
|
263
|
-
client_api=self._client_api, _json=_json, project=self._project, dataset=self._dataset
|
|
264
|
-
)
|
|
265
|
-
for _json in response.json()["items"]
|
|
266
|
-
]
|
|
267
|
-
)
|
|
268
|
-
else:
|
|
269
|
-
logger.error("Platform error getting annotation task")
|
|
270
|
-
raise exceptions.PlatformException(response)
|
|
271
|
-
|
|
272
|
-
return tasks
|
|
273
|
-
|
|
274
|
-
@_api_reference.add(path="/annotationtasks/{id}", method="get")
|
|
275
|
-
def get(self, task_name=None, task_id=None) -> entities.Task:
|
|
276
|
-
"""
|
|
277
|
-
Get a Task object to use in your code.
|
|
278
|
-
|
|
279
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
|
|
280
|
-
|
|
281
|
-
:param str task_name: optional - search by name
|
|
282
|
-
:param str task_id: optional - search by id
|
|
283
|
-
:return: task object
|
|
284
|
-
:rtype: dtlpy.entities.task.Task
|
|
285
|
-
|
|
286
|
-
**Example**:
|
|
287
|
-
|
|
288
|
-
.. code-block:: python
|
|
289
|
-
|
|
290
|
-
dataset.tasks.get(task_id='task_id')
|
|
291
|
-
"""
|
|
292
|
-
|
|
293
|
-
# url
|
|
294
|
-
url = URL_PATH
|
|
295
|
-
|
|
296
|
-
if task_id is not None:
|
|
297
|
-
url = "{}/{}".format(url, task_id)
|
|
298
|
-
success, response = self._client_api.gen_request(req_type="get", path=url)
|
|
299
|
-
if not success:
|
|
300
|
-
raise exceptions.PlatformException(response)
|
|
301
|
-
else:
|
|
302
|
-
task = entities.Task.from_json(
|
|
303
|
-
_json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
|
|
304
|
-
)
|
|
305
|
-
# verify input task name is same as the given id
|
|
306
|
-
if task_name is not None and task.name != task_name:
|
|
307
|
-
logger.warning(
|
|
308
|
-
"Mismatch found in tasks.get: task_name is different then task.name:"
|
|
309
|
-
" {!r} != {!r}".format(task_name, task.name)
|
|
310
|
-
)
|
|
311
|
-
elif task_name is not None:
|
|
312
|
-
tasks = self.list(
|
|
313
|
-
filters=entities.Filters(field="name", values=task_name, resource=entities.FiltersResource.TASK)
|
|
314
|
-
)
|
|
315
|
-
if tasks.items_count == 0:
|
|
316
|
-
raise exceptions.PlatformException("404", "Annotation task not found")
|
|
317
|
-
elif tasks.items_count > 1:
|
|
318
|
-
raise exceptions.PlatformException(
|
|
319
|
-
"404", f"More than one Annotation task exist with the same name: {task_name}"
|
|
320
|
-
)
|
|
321
|
-
else:
|
|
322
|
-
task = tasks[0][0]
|
|
323
|
-
else:
|
|
324
|
-
raise exceptions.PlatformException("400", "Must provide either Annotation task name or Annotation task id")
|
|
325
|
-
|
|
326
|
-
assert isinstance(task, entities.Task)
|
|
327
|
-
return task
|
|
328
|
-
|
|
329
|
-
@property
|
|
330
|
-
def platform_url(self):
|
|
331
|
-
return self._client_api._get_resource_url(f"projects/{self.project.id}/tasks")
|
|
332
|
-
|
|
333
|
-
def open_in_web(self, task_name: str = None, task_id: str = None, task: entities.Task = None):
|
|
334
|
-
"""
|
|
335
|
-
Open the task in the web platform.
|
|
336
|
-
|
|
337
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
|
|
338
|
-
|
|
339
|
-
:param str task_name: the name of the task
|
|
340
|
-
:param str task_id: the Id of the task
|
|
341
|
-
:param dtlpy.entities.task.Task task: the task object
|
|
342
|
-
|
|
343
|
-
**Example**:
|
|
344
|
-
|
|
345
|
-
.. code-block:: python
|
|
346
|
-
|
|
347
|
-
dataset.tasks.open_in_web(task_id='task_id')
|
|
348
|
-
"""
|
|
349
|
-
if task_name is not None:
|
|
350
|
-
task = self.get(task_name=task_name)
|
|
351
|
-
if task is not None:
|
|
352
|
-
task.open_in_web()
|
|
353
|
-
elif task_id is not None:
|
|
354
|
-
self._client_api._open_in_web(url=self.platform_url + "/" + str(task_id))
|
|
355
|
-
else:
|
|
356
|
-
self._client_api._open_in_web(url=self.platform_url)
|
|
357
|
-
|
|
358
|
-
@_api_reference.add(path="/annotationtasks/{id}", method="delete")
|
|
359
|
-
def delete(self, task: entities.Task = None, task_name: str = None, task_id: str = None, wait: bool = True):
|
|
360
|
-
"""
|
|
361
|
-
Delete the Task.
|
|
362
|
-
|
|
363
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.
|
|
364
|
-
|
|
365
|
-
:param dtlpy.entities.task.Task task: the task object
|
|
366
|
-
:param str task_name: the name of the task
|
|
367
|
-
:param str task_id: the Id of the task
|
|
368
|
-
:param bool wait: wait until delete task finish
|
|
369
|
-
:return: True is success
|
|
370
|
-
:rtype: bool
|
|
371
|
-
|
|
372
|
-
**Example**:
|
|
373
|
-
|
|
374
|
-
.. code-block:: python
|
|
375
|
-
|
|
376
|
-
dataset.tasks.delete(task_id='task_id')
|
|
377
|
-
"""
|
|
378
|
-
if task_id is None:
|
|
379
|
-
if task is None:
|
|
380
|
-
if task_name is None:
|
|
381
|
-
raise exceptions.PlatformException(
|
|
382
|
-
"400", "Must provide either annotation task, " "annotation task name or annotation task id"
|
|
383
|
-
)
|
|
384
|
-
else:
|
|
385
|
-
task = self.get(task_name=task_name)
|
|
386
|
-
task_id = task.id
|
|
387
|
-
|
|
388
|
-
url = URL_PATH
|
|
389
|
-
url = f"{url}/{task_id}"
|
|
390
|
-
success, response = self._client_api.gen_request(req_type="delete", path=url, json_req={"asynced": wait})
|
|
391
|
-
|
|
392
|
-
if not success:
|
|
393
|
-
raise exceptions.PlatformException(response)
|
|
394
|
-
response_json = response.json()
|
|
395
|
-
command = entities.Command.from_json(_json=response_json, client_api=self._client_api)
|
|
396
|
-
if not wait:
|
|
397
|
-
return command
|
|
398
|
-
command = command.wait(timeout=0)
|
|
399
|
-
if "deleteTaskId" not in command.spec:
|
|
400
|
-
raise exceptions.PlatformException(
|
|
401
|
-
error="400", message="deleteTaskId key is missing in command response: {}".format(response)
|
|
402
|
-
)
|
|
403
|
-
return True
|
|
404
|
-
|
|
405
|
-
@_api_reference.add(path="/annotationtasks/{id}", method="patch")
|
|
406
|
-
def update(self, task: entities.Task = None, system_metadata=False) -> entities.Task:
|
|
407
|
-
"""
|
|
408
|
-
Update a Task.
|
|
409
|
-
|
|
410
|
-
**Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.
|
|
411
|
-
|
|
412
|
-
:param dtlpy.entities.task.Task task: the task object
|
|
413
|
-
:param bool system_metadata: DEPRECATED
|
|
414
|
-
:return: Task object
|
|
415
|
-
:rtype: dtlpy.entities.task.Task
|
|
416
|
-
|
|
417
|
-
**Example**:
|
|
418
|
-
|
|
419
|
-
.. code-block:: python
|
|
420
|
-
|
|
421
|
-
dataset.tasks.update(task='task_entity')
|
|
422
|
-
"""
|
|
423
|
-
url = URL_PATH
|
|
424
|
-
url = f"{url}/{task.id}"
|
|
425
|
-
|
|
426
|
-
if system_metadata:
|
|
427
|
-
warnings.warn(
|
|
428
|
-
"Task system metadata updates are not permitted. Please store custom metadata in 'task.metadata['user']' instead.",
|
|
429
|
-
DeprecationWarning,
|
|
430
|
-
)
|
|
431
|
-
|
|
432
|
-
success, response = self._client_api.gen_request(req_type="patch", path=url, json_req=task.to_json())
|
|
433
|
-
if success:
|
|
434
|
-
return entities.Task.from_json(
|
|
435
|
-
_json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
|
|
436
|
-
)
|
|
437
|
-
else:
|
|
438
|
-
raise exceptions.PlatformException(response)
|
|
439
|
-
|
|
440
|
-
def create_qa_task(self,
|
|
441
|
-
task: entities.Task,
|
|
442
|
-
assignee_ids,
|
|
443
|
-
due_date=None,
|
|
444
|
-
filters=None,
|
|
445
|
-
items=None,
|
|
446
|
-
query=None,
|
|
447
|
-
workload=None,
|
|
448
|
-
metadata=None,
|
|
449
|
-
available_actions=None,
|
|
450
|
-
wait=True,
|
|
451
|
-
batch_size=None,
|
|
452
|
-
max_batch_workload=None,
|
|
453
|
-
allowed_assignees=None,
|
|
454
|
-
priority=entities.TaskPriority.MEDIUM
|
|
455
|
-
) -> entities.Task:
|
|
456
|
-
"""
|
|
457
|
-
Create a new QA Task.
|
|
458
|
-
|
|
459
|
-
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
460
|
-
|
|
461
|
-
:param dtlpy.entities.task.Task task: the parent annotation task object
|
|
462
|
-
:param list assignee_ids: list the QA task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
463
|
-
:param float due_date: date by which the QA task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
464
|
-
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
465
|
-
:param List[entities.Item] items: list of items (item Id or objects) to insert to the task
|
|
466
|
-
:param dict DQL query: filter items for the task
|
|
467
|
-
:param List[WorkloadUnit] workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
|
|
468
|
-
:param dict metadata: metadata for the task
|
|
469
|
-
:param list available_actions: list of available actions (statuses) that will be available for the task items; The default statuses are: "approved" and "discard"
|
|
470
|
-
:param bool wait: wait until create task finish
|
|
471
|
-
:param int batch_size: Pulling batch size (items), use with pulling allocation method. Restrictions - Min 3, max 100
|
|
472
|
-
:param int max_batch_workload: Max items in assignment, use with pulling allocation method. Restrictions - Min batchSize + 2, max batchSize * 2
|
|
473
|
-
:param list allowed_assignees: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
474
|
-
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
475
|
-
:return: task object
|
|
476
|
-
:rtype: dtlpy.entities.task.Task
|
|
477
|
-
|
|
478
|
-
**Example**:
|
|
479
|
-
|
|
480
|
-
.. code-block:: python
|
|
481
|
-
|
|
482
|
-
dataset.tasks.create_qa_task(task= 'task_entity',
|
|
483
|
-
due_date = datetime.datetime(day= 1, month= 1, year= 2029).timestamp(),
|
|
484
|
-
assignee_ids =[ 'annotator1@dataloop.ai', 'annotator2@dataloop.ai'])
|
|
485
|
-
"""
|
|
486
|
-
source_filter = entities.filters.SingleFilter(
|
|
487
|
-
field='metadata.system.refs',
|
|
488
|
-
values={
|
|
489
|
-
"id": task.id,
|
|
490
|
-
"type": "task",
|
|
491
|
-
"metadata":
|
|
492
|
-
{
|
|
493
|
-
"status":
|
|
494
|
-
{
|
|
495
|
-
"$exists": True
|
|
496
|
-
}
|
|
497
|
-
}
|
|
498
|
-
},
|
|
499
|
-
operator=entities.FiltersOperations.MATCH
|
|
500
|
-
)
|
|
501
|
-
|
|
502
|
-
if query is not None:
|
|
503
|
-
and_list = query.get('filter', query).get('$and', None)
|
|
504
|
-
if and_list is not None:
|
|
505
|
-
and_list.append(source_filter.prepare())
|
|
506
|
-
else:
|
|
507
|
-
if 'filter' not in query:
|
|
508
|
-
query['filter'] = {}
|
|
509
|
-
query['filter']['$and'] = [source_filter.prepare()]
|
|
510
|
-
|
|
511
|
-
else:
|
|
512
|
-
if filters is None and items is None:
|
|
513
|
-
filters = entities.Filters()
|
|
514
|
-
if filters:
|
|
515
|
-
filters.and_filter_list.append(source_filter)
|
|
516
|
-
|
|
517
|
-
return self.create(task_name='{}_qa'.format(task.name),
|
|
518
|
-
task_type='qa',
|
|
519
|
-
task_parent_id=task.id,
|
|
520
|
-
assignee_ids=assignee_ids,
|
|
521
|
-
workload=workload,
|
|
522
|
-
task_owner=task.creator,
|
|
523
|
-
project_id=task.project_id,
|
|
524
|
-
recipe_id=task.recipe_id,
|
|
525
|
-
due_date=due_date,
|
|
526
|
-
filters=filters,
|
|
527
|
-
items=items,
|
|
528
|
-
query=query,
|
|
529
|
-
metadata=metadata,
|
|
530
|
-
available_actions=available_actions,
|
|
531
|
-
wait=wait,
|
|
532
|
-
batch_size=batch_size,
|
|
533
|
-
max_batch_workload=max_batch_workload,
|
|
534
|
-
allowed_assignees=allowed_assignees,
|
|
535
|
-
priority=priority
|
|
536
|
-
)
|
|
537
|
-
|
|
538
|
-
def create_honeypot_task(
|
|
539
|
-
self,
|
|
540
|
-
name: str,
|
|
541
|
-
dataset: entities.Dataset = None,
|
|
542
|
-
due_date: float = None,
|
|
543
|
-
filters: entities.Filters = None,
|
|
544
|
-
owner: str = None,
|
|
545
|
-
recipe_id: str = None,
|
|
546
|
-
assignee_ids: List[str] = None,
|
|
547
|
-
workload=None,
|
|
548
|
-
available_actions=None,
|
|
549
|
-
priority=entities.TaskPriority.MEDIUM,
|
|
550
|
-
consensus_percentage=None,
|
|
551
|
-
consensus_assignees=None,
|
|
552
|
-
scoring=True,
|
|
553
|
-
limit=None,
|
|
554
|
-
wait=True,
|
|
555
|
-
enforce_video_conversion=True,
|
|
556
|
-
) -> entities.Task:
|
|
557
|
-
"""
|
|
558
|
-
Create a new Consensus Task.
|
|
559
|
-
|
|
560
|
-
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
561
|
-
|
|
562
|
-
:param str name: the name of the task
|
|
563
|
-
:param entities.Dataset dataset: dataset object, the dataset that refer to the task
|
|
564
|
-
:param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
565
|
-
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
566
|
-
:param str owner: task owner. Provide user email
|
|
567
|
-
:param str recipe_id: recipe id for the task
|
|
568
|
-
:param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
569
|
-
:param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
|
|
570
|
-
:param list available_actions: list of available actions (statuses) that will be available for the task items
|
|
571
|
-
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
572
|
-
:param str consensus_task_type: consensus task type - "consensus", "qualification", or "honeypot"
|
|
573
|
-
:param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
|
|
574
|
-
:param int consensus_assignees: the number of different annotators per item (number of copies per item)
|
|
575
|
-
:param bool scoring: create a scoring app in project
|
|
576
|
-
:param int limit: the limit items that the task can include
|
|
577
|
-
:param bool wait: wait until create task finish
|
|
578
|
-
:param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations
|
|
579
|
-
:return: Task object
|
|
580
|
-
:rtype: dtlpy.entities.task.Task
|
|
581
|
-
|
|
582
|
-
**Example**:
|
|
583
|
-
|
|
584
|
-
.. code-block:: python
|
|
585
|
-
|
|
586
|
-
# Create a consensus task
|
|
587
|
-
dataset.tasks.create_consensus_task(name='my_consensus_task',
|
|
588
|
-
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
|
|
589
|
-
consensus_percentage=66,
|
|
590
|
-
consensus_assignees=2)
|
|
591
|
-
"""
|
|
592
|
-
return self.create_consensus_task(
|
|
593
|
-
name=name,
|
|
594
|
-
dataset=dataset,
|
|
595
|
-
due_date=due_date,
|
|
596
|
-
filters=filters,
|
|
597
|
-
owner=owner,
|
|
598
|
-
recipe_id=recipe_id,
|
|
599
|
-
assignee_ids=assignee_ids,
|
|
600
|
-
workload=workload,
|
|
601
|
-
available_actions=available_actions,
|
|
602
|
-
priority=priority,
|
|
603
|
-
consensus_task_type=entities.ConsensusTaskType.HONEYPOT,
|
|
604
|
-
consensus_percentage=consensus_percentage,
|
|
605
|
-
consensus_assignees=consensus_assignees,
|
|
606
|
-
scoring=scoring,
|
|
607
|
-
limit=limit,
|
|
608
|
-
wait=wait,
|
|
609
|
-
enforce_video_conversion=enforce_video_conversion,
|
|
610
|
-
)
|
|
611
|
-
|
|
612
|
-
def create_qualification_task(
|
|
613
|
-
self,
|
|
614
|
-
name: str,
|
|
615
|
-
dataset: entities.Dataset = None,
|
|
616
|
-
due_date: float = None,
|
|
617
|
-
filters: entities.Filters = None,
|
|
618
|
-
owner: str = None,
|
|
619
|
-
recipe_id: str = None,
|
|
620
|
-
assignee_ids: List[str] = None,
|
|
621
|
-
workload=None,
|
|
622
|
-
available_actions=None,
|
|
623
|
-
priority=entities.TaskPriority.MEDIUM,
|
|
624
|
-
consensus_percentage=None,
|
|
625
|
-
consensus_assignees=None,
|
|
626
|
-
limit=None,
|
|
627
|
-
wait=True,
|
|
628
|
-
enforce_video_conversion=True,
|
|
629
|
-
) -> entities.Task:
|
|
630
|
-
"""
|
|
631
|
-
Create a new Qualification Task.
|
|
632
|
-
"""
|
|
633
|
-
return self.create_consensus_task(
|
|
634
|
-
name=name,
|
|
635
|
-
dataset=dataset,
|
|
636
|
-
due_date=due_date,
|
|
637
|
-
filters=filters,
|
|
638
|
-
owner=owner,
|
|
639
|
-
recipe_id=recipe_id,
|
|
640
|
-
assignee_ids=assignee_ids,
|
|
641
|
-
workload=workload,
|
|
642
|
-
available_actions=available_actions,
|
|
643
|
-
priority=priority,
|
|
644
|
-
consensus_task_type=entities.ConsensusTaskType.QUALIFICATION,
|
|
645
|
-
consensus_percentage=consensus_percentage,
|
|
646
|
-
consensus_assignees=consensus_assignees,
|
|
647
|
-
scoring=True,
|
|
648
|
-
limit=limit,
|
|
649
|
-
wait=wait,
|
|
650
|
-
enforce_video_conversion=enforce_video_conversion,
|
|
651
|
-
)
|
|
652
|
-
|
|
653
|
-
def create_consensus_task(
|
|
654
|
-
self,
|
|
655
|
-
name: str,
|
|
656
|
-
dataset: entities.Dataset = None,
|
|
657
|
-
due_date: float = None,
|
|
658
|
-
filters: entities.Filters = None,
|
|
659
|
-
owner: str = None,
|
|
660
|
-
recipe_id: str = None,
|
|
661
|
-
assignee_ids: List[str] = None,
|
|
662
|
-
workload=None,
|
|
663
|
-
available_actions=None,
|
|
664
|
-
priority=entities.TaskPriority.MEDIUM,
|
|
665
|
-
metadata=None,
|
|
666
|
-
consensus_task_type: entities.ConsensusTaskType = entities.ConsensusTaskType.CONSENSUS,
|
|
667
|
-
consensus_percentage=None,
|
|
668
|
-
consensus_assignees=None,
|
|
669
|
-
scoring=True,
|
|
670
|
-
limit=None,
|
|
671
|
-
wait=True,
|
|
672
|
-
enforce_video_conversion=True,
|
|
673
|
-
) -> entities.Task:
|
|
674
|
-
"""
|
|
675
|
-
Create a new Consensus Task.
|
|
676
|
-
|
|
677
|
-
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
678
|
-
|
|
679
|
-
:param str name: the name of the task
|
|
680
|
-
:param entities.Dataset dataset: dataset object, the dataset that refer to the task
|
|
681
|
-
:param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
682
|
-
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
683
|
-
:param str owner: task owner. Provide user email
|
|
684
|
-
:param str recipe_id: recipe id for the task
|
|
685
|
-
:param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
686
|
-
:param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
|
|
687
|
-
:param list available_actions: list of available actions (statuses) that will be available for the task items
|
|
688
|
-
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
689
|
-
:param dict metadata: metadata for the task
|
|
690
|
-
:param str consensus_task_type: consensus task type - "consensus", "qualification", or "honeypot"
|
|
691
|
-
:param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
|
|
692
|
-
:param int consensus_assignees: the number of different annotators per item (number of copies per item)
|
|
693
|
-
:param bool scoring: create a scoring app in project
|
|
694
|
-
:param int limit: the limit items that the task can include
|
|
695
|
-
:param bool wait: wait until create task finish
|
|
696
|
-
:param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations
|
|
697
|
-
:return: Task object
|
|
698
|
-
:rtype: dtlpy.entities.task.Task
|
|
699
|
-
|
|
700
|
-
**Example**:
|
|
701
|
-
|
|
702
|
-
.. code-block:: python
|
|
703
|
-
|
|
704
|
-
# Create a consensus task
|
|
705
|
-
dataset.tasks.create_consensus_task(name='my_consensus_task',
|
|
706
|
-
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
|
|
707
|
-
consensus_percentage=66,
|
|
708
|
-
consensus_assignees=2)
|
|
709
|
-
"""
|
|
710
|
-
|
|
711
|
-
if dataset is None:
|
|
712
|
-
dataset = self.dataset
|
|
713
|
-
|
|
714
|
-
if due_date is None:
|
|
715
|
-
due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()
|
|
716
|
-
|
|
717
|
-
if filters is None:
|
|
718
|
-
filters = entities.Filters()
|
|
719
|
-
|
|
720
|
-
if owner is None:
|
|
721
|
-
owner = self._client_api.info()["user_email"]
|
|
722
|
-
|
|
723
|
-
if recipe_id is None:
|
|
724
|
-
recipe_id = dataset.get_recipe_ids()[0]
|
|
725
|
-
|
|
726
|
-
if workload is None and assignee_ids is not None:
|
|
727
|
-
workload = entities.Workload.generate(assignee_ids=assignee_ids)
|
|
728
|
-
|
|
729
|
-
# Handle metadata for consensus tasks
|
|
730
|
-
if metadata is None:
|
|
731
|
-
metadata = {}
|
|
732
|
-
if "system" not in metadata:
|
|
733
|
-
metadata["system"] = {}
|
|
734
|
-
if assignee_ids is not None:
|
|
735
|
-
metadata["system"]["allowedAssignees"] = assignee_ids
|
|
736
|
-
if consensus_task_type is not None:
|
|
737
|
-
metadata["system"]["consensusTaskType"] = consensus_task_type
|
|
738
|
-
metadata = self._add_task_metadata_params(
|
|
739
|
-
metadata=metadata, input_value=consensus_percentage, input_name="consensusPercentage"
|
|
740
|
-
)
|
|
741
|
-
metadata = self._add_task_metadata_params(
|
|
742
|
-
metadata=metadata, input_value=consensus_assignees, input_name="consensusAssignees"
|
|
743
|
-
)
|
|
744
|
-
metadata = self._add_task_metadata_params(metadata=metadata, input_value=scoring, input_name="scoring")
|
|
745
|
-
|
|
746
|
-
# Create payload for consensus task
|
|
747
|
-
payload = {
|
|
748
|
-
"name": name,
|
|
749
|
-
"query": "{}".format(json.dumps(filters.prepare()).replace("'", '"')),
|
|
750
|
-
"taskOwner": owner,
|
|
751
|
-
"spec": {"type": "annotation"},
|
|
752
|
-
"datasetId": dataset.id,
|
|
753
|
-
"projectId": self.project.id,
|
|
754
|
-
"assignmentIds": [],
|
|
755
|
-
"recipeId": recipe_id,
|
|
756
|
-
"dueDate": due_date * 1000,
|
|
757
|
-
"asynced": wait,
|
|
758
|
-
"priority": priority,
|
|
759
|
-
"percentage": True,
|
|
760
|
-
}
|
|
761
|
-
|
|
762
|
-
# Add workload if provided
|
|
763
|
-
if workload:
|
|
764
|
-
payload["workload"] = workload.to_json()
|
|
765
|
-
|
|
766
|
-
# Add limit if provided
|
|
767
|
-
if limit:
|
|
768
|
-
payload["limit"] = limit
|
|
769
|
-
|
|
770
|
-
# Add available actions if provided
|
|
771
|
-
if available_actions is not None:
|
|
772
|
-
payload["availableActions"] = [action.to_json() for action in available_actions]
|
|
773
|
-
|
|
774
|
-
# Handle video conversion
|
|
775
|
-
if not enforce_video_conversion:
|
|
776
|
-
payload["disableWebm"] = not enforce_video_conversion
|
|
777
|
-
|
|
778
|
-
# Handle metadata for consensus tasks
|
|
779
|
-
if metadata is not None:
|
|
780
|
-
payload["metadata"] = metadata
|
|
781
|
-
|
|
782
|
-
return self._create_task(payload, wait=wait)
|
|
783
|
-
|
|
784
|
-
def _add_task_metadata_params(self, metadata, input_value, input_name):
|
|
785
|
-
if input_value is not None and not isinstance(input_value, int):
|
|
786
|
-
raise exceptions.PlatformException(error="400", message=f"{input_name} must be a numbers")
|
|
787
|
-
if input_value is not None:
|
|
788
|
-
metadata["system"][input_name] = input_value
|
|
789
|
-
return metadata
|
|
790
|
-
|
|
791
|
-
def create_labeling_task(
|
|
792
|
-
self,
|
|
793
|
-
name: str,
|
|
794
|
-
dataset: entities.Dataset = None,
|
|
795
|
-
due_date: float = None,
|
|
796
|
-
filters: entities.Filters = None,
|
|
797
|
-
owner: str = None,
|
|
798
|
-
recipe_id: str = None,
|
|
799
|
-
assignee_ids: List[str] = None,
|
|
800
|
-
workload=None,
|
|
801
|
-
available_actions=None,
|
|
802
|
-
priority=entities.TaskPriority.MEDIUM,
|
|
803
|
-
metadata=None,
|
|
804
|
-
batch_size=None,
|
|
805
|
-
max_batch_workload=None,
|
|
806
|
-
allowed_assignees=None,
|
|
807
|
-
limit=None,
|
|
808
|
-
wait=True,
|
|
809
|
-
enforce_video_conversion=True,
|
|
810
|
-
) -> entities.Task:
|
|
811
|
-
"""
|
|
812
|
-
Create a new Annotation Task (Distribution or Pulling).
|
|
813
|
-
|
|
814
|
-
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
815
|
-
|
|
816
|
-
:param str name: the name of the task
|
|
817
|
-
:param entities.Dataset dataset: dataset object, the dataset that refer to the task
|
|
818
|
-
:param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
819
|
-
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
820
|
-
:param str owner: task owner. Provide user email
|
|
821
|
-
:param str recipe_id: recipe id for the task
|
|
822
|
-
:param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
823
|
-
:param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
|
|
824
|
-
:param list available_actions: list of available actions (statuses) that will be available for the task items
|
|
825
|
-
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
826
|
-
:param dict metadata: metadata for the task
|
|
827
|
-
:param int batch_size: Pulling batch size (items), use with pulling allocation method. Restrictions - Min 3, max 100
|
|
828
|
-
:param int max_batch_workload: Max items in assignment, use with pulling allocation method. Restrictions - Min batchSize + 2, max batchSize * 2
|
|
829
|
-
:param list allowed_assignees: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
830
|
-
:param int limit: the limit items that the task can include
|
|
831
|
-
:param bool wait: wait until create task finish
|
|
832
|
-
:param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations
|
|
833
|
-
:return: Task object
|
|
834
|
-
:rtype: dtlpy.entities.task.Task
|
|
835
|
-
|
|
836
|
-
**Example**:
|
|
837
|
-
|
|
838
|
-
.. code-block:: python
|
|
839
|
-
|
|
840
|
-
# Create a distribution task
|
|
841
|
-
dataset.tasks.create_labeling(name='my_distribution_task',
|
|
842
|
-
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'])
|
|
843
|
-
|
|
844
|
-
# Create a pulling task
|
|
845
|
-
dataset.tasks.create_labeling(name='my_pulling_task',
|
|
846
|
-
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
|
|
847
|
-
batch_size=5,
|
|
848
|
-
max_batch_workload=7)
|
|
849
|
-
"""
|
|
850
|
-
|
|
851
|
-
if dataset is None:
|
|
852
|
-
dataset = self.dataset
|
|
853
|
-
|
|
854
|
-
if due_date is None:
|
|
855
|
-
due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()
|
|
856
|
-
|
|
857
|
-
if filters is None:
|
|
858
|
-
filters = entities.Filters()
|
|
859
|
-
|
|
860
|
-
if owner is None:
|
|
861
|
-
owner = self._client_api.info()["user_email"]
|
|
862
|
-
|
|
863
|
-
if recipe_id is None:
|
|
864
|
-
recipe_id = dataset.get_recipe_ids()[0]
|
|
865
|
-
|
|
866
|
-
if workload is None and assignee_ids is not None:
|
|
867
|
-
workload = entities.Workload.generate(assignee_ids=assignee_ids)
|
|
868
|
-
|
|
869
|
-
if metadata is None:
|
|
870
|
-
metadata = {}
|
|
871
|
-
if any([batch_size, max_batch_workload]):
|
|
872
|
-
if "system" not in metadata:
|
|
873
|
-
metadata["system"] = {}
|
|
874
|
-
if allowed_assignees is not None or assignee_ids is not None:
|
|
875
|
-
metadata["system"]["allowedAssignees"] = allowed_assignees if allowed_assignees else assignee_ids
|
|
876
|
-
metadata = self._add_task_metadata_params(metadata=metadata, input_value=batch_size, input_name="batchSize")
|
|
877
|
-
metadata = self._add_task_metadata_params(
|
|
878
|
-
metadata=metadata, input_value=max_batch_workload, input_name="maxBatchWorkload"
|
|
879
|
-
)
|
|
880
|
-
|
|
881
|
-
# Create payload for annotation task
|
|
882
|
-
payload = {
|
|
883
|
-
"name": name,
|
|
884
|
-
"query": "{}".format(json.dumps(filters.prepare()).replace("'", '"')),
|
|
885
|
-
"taskOwner": owner,
|
|
886
|
-
"spec": {"type": "annotation"},
|
|
887
|
-
"datasetId": dataset.id,
|
|
888
|
-
"projectId": self.project.id,
|
|
889
|
-
"assignmentIds": [],
|
|
890
|
-
"recipeId": recipe_id,
|
|
891
|
-
"dueDate": due_date * 1000,
|
|
892
|
-
"asynced": wait,
|
|
893
|
-
"priority": priority,
|
|
894
|
-
}
|
|
895
|
-
|
|
896
|
-
# Add workload if provided
|
|
897
|
-
if workload:
|
|
898
|
-
payload["workload"] = workload.to_json()
|
|
899
|
-
|
|
900
|
-
# Add limit if provided
|
|
901
|
-
if limit:
|
|
902
|
-
payload["limit"] = limit
|
|
903
|
-
|
|
904
|
-
# Add available actions if provided
|
|
905
|
-
if available_actions is not None:
|
|
906
|
-
payload["availableActions"] = [action.to_json() for action in available_actions]
|
|
907
|
-
|
|
908
|
-
# Handle video conversion
|
|
909
|
-
if not enforce_video_conversion:
|
|
910
|
-
payload["disableWebm"] = not enforce_video_conversion
|
|
911
|
-
|
|
912
|
-
# Handle metadata for pulling tasks
|
|
913
|
-
if metadata is not None:
|
|
914
|
-
payload["metadata"] = metadata
|
|
915
|
-
|
|
916
|
-
return self._create_task(payload, wait=wait)
|
|
917
|
-
|
|
918
|
-
def _create_task(self, payload: dict, wait: bool = True) -> entities.Task:
|
|
919
|
-
"""
|
|
920
|
-
Private function to create a task from a prepared payload.
|
|
921
|
-
|
|
922
|
-
:param dict payload: the prepared payload for task creation
|
|
923
|
-
:param bool wait: whether to wait for task creation to complete
|
|
924
|
-
:return: created Task object
|
|
925
|
-
:rtype: dtlpy.entities.task.Task
|
|
926
|
-
"""
|
|
927
|
-
success, response = self._client_api.gen_request(req_type="post", path=URL_PATH, json_req=payload)
|
|
928
|
-
if success:
|
|
929
|
-
response_json = response.json()
|
|
930
|
-
-            if payload.get("checkIfExist") is not None and "name" in response_json:
-                return entities.Task.from_json(
-                    _json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
-                )
-
-            command = entities.Command.from_json(_json=response_json, client_api=self._client_api)
-            if not wait:
-                return command
-            command = command.wait(timeout=0)
-            if "createTaskPayload" not in command.spec:
-                raise exceptions.PlatformException(
-                    error="400", message="createTaskPayload key is missing in command response: {}".format(response)
-                )
-            task = self.get(task_id=command.spec["createdTaskId"])
-        else:
-            raise exceptions.PlatformException(response)
-
-        assert isinstance(task, entities.Task)
-        return task
-
-    @_api_reference.add(path="/annotationtasks", method="post")
-    def create(
-        self,
-        task_name,
-        due_date=None,
-        assignee_ids=None,
-        workload=None,
-        dataset=None,
-        task_owner=None,
-        task_type="annotation",
-        task_parent_id=None,
-        project_id=None,
-        recipe_id=None,
-        assignments_ids=None,
-        metadata=None,
-        filters=None,
-        items=None,
-        query=None,
-        available_actions=None,
-        wait=True,
-        check_if_exist: entities.Filters = False,
-        limit=None,
-        batch_size=None,
-        max_batch_workload=None,
-        allowed_assignees=None,
-        priority=entities.TaskPriority.MEDIUM,
-        consensus_task_type=None,
-        consensus_percentage=None,
-        consensus_assignees=None,
-        scoring=True,
-        enforce_video_conversion=True,
-    ) -> entities.Task:
-        """
-        Create a new Task (Annotation or QA).
-
-        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
-
-        :param str task_name: the name of the task
-        :param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
-        :param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
-        :param List[WorkloadUnit] List[WorkloadUnit] workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
-        :param entities.Dataset dataset: dataset object, the dataset that refer to the task
-        :param str task_owner: task owner. Provide user email
-        :param str task_type: task type "annotation" or "qa"
-        :param str task_parent_id: optional if type is qa - parent annotation task id
-        :param str project_id: the Id of the project where task will be created
-        :param str recipe_id: recipe id for the task
-        :param list assignments_ids: assignments ids to the task
-        :param dict metadata: metadata for the task
-        :param entities.Filters filters: dl.Filters entity to filter items for the task
-        :param List[entities.Item] items: list of items (item Id or objects) to insert to the task
-        :param dict DQL query: filter items for the task
-        :param list available_actions: list of available actions (statuses) that will be available for the task items; The default statuses are: "completed" and "discard"
-        :param bool wait: wait until create task finish
-        :param entities.Filters check_if_exist: dl.Filters check if task exist according to filter
-        :param int limit: the limit items that the task can include
-        :param int batch_size: Pulling batch size (items), use with pulling allocation method. Restrictions - Min 3, max 100
-        :param int max_batch_workload: max_batch_workload: Max items in assignment, use with pulling allocation method. Restrictions - Min batchSize + 2, max batchSize * 2
-        :param list allowed_assignees: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
-        :param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
-        :param entities.ConsensusTaskType consensus_task_type: consensus_task_type of the task options in entities.ConsensusTaskType
-        :param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
-        :param int consensus_assignees: the number of different annotators per item (number of copies per item)
-        :param bool scoring: create a scoring app in project
-        :param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations. WEBM Conversion will be executed as a project service and incurs compute costs. Service compute resources can be set according to planned workload.
-        :return: Task object
-        :rtype: dtlpy.entities.task.Task
-
-        **Example**:
-
-        .. code-block:: python
-
-            dataset.tasks.create(task= 'task_entity',
-                due_date = datetime.datetime(day= 1, month= 1, year= 2029).timestamp(),
-                assignee_ids =[ 'annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
-                available_actions=[dl.ItemAction("discard"), dl.ItemAction("to-check")])
-        """
-
-        if dataset is None and self._dataset is None:
-            raise exceptions.PlatformException("400", "Please provide param dataset")
-        if due_date is None:
-            due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()
-        if query is None:
-            if filters is None and items is None:
-                query = entities.Filters().prepare()
-            elif filters is None:
-                item_list = list()
-                if isinstance(items, entities.PagedEntities):
-                    for page in items:
-                        for item in page:
-                            item_list.append(item)
-                elif isinstance(items, list):
-                    item_list = items
-                elif isinstance(items, entities.Item):
-                    item_list.append(items)
-                else:
-                    raise exceptions.PlatformException("400", "Unknown items type")
-                query = entities.Filters(
-                    field="id",
-                    values=[item.id for item in item_list],
-                    operator=entities.FiltersOperations.IN,
-                    use_defaults=False,
-                ).prepare()
-        else:
-            query = filters.prepare()
-
-        if dataset is None:
-            dataset = self._dataset
-
-        if task_owner is None:
-            task_owner = self._client_api.info()["user_email"]
-
-        if task_type not in ["annotation", "qa"]:
-            raise ValueError('task_type must be one of: "annotation", "qa". got: {}'.format(task_type))
-
-        if recipe_id is None:
-            recipe_id = dataset.get_recipe_ids()[0]
-
-        if project_id is None:
-            if self._project_id is not None:
-                project_id = self._project_id
-            else:
-                project_id = self.project.id
-
-        if workload is None and assignee_ids is not None:
-            workload = entities.Workload.generate(assignee_ids=assignee_ids)
-
-        if assignments_ids is None:
-            assignments_ids = list()
-
-        payload = {
-            "name": task_name,
-            "query": "{}".format(json.dumps(query).replace("'", '"')),
-            "taskOwner": task_owner,
-            "spec": {"type": task_type},
-            "datasetId": dataset.id,
-            "projectId": project_id,
-            "assignmentIds": assignments_ids,
-            "recipeId": recipe_id,
-            "dueDate": due_date * 1000,
-            "asynced": wait,
-            "priority": priority,
-        }
-
-        if check_if_exist:
-            if check_if_exist.resource != entities.FiltersResource.TASK:
-                raise exceptions.PlatformException(
-                    "407",
-                    "Filter resource for check_if_exist param must be {}, got {}".format(
-                        entities.FiltersResource.TASK, check_if_exist.resource
-                    ),
-                )
-            payload["checkIfExist"] = {"query": check_if_exist.prepare()}
-
-        if workload:
-            payload["workload"] = workload.to_json()
-
-        if limit:
-            payload["limit"] = limit
-
-        if available_actions is not None:
-            payload["availableActions"] = [action.to_json() for action in available_actions]
-
-        if task_parent_id is not None:
-            payload["spec"]["parentTaskId"] = task_parent_id
-
-        if not enforce_video_conversion:
-            payload["disableWebm"] = not enforce_video_conversion
-
-        is_pulling = any([batch_size, max_batch_workload])
-        is_consensus = any([consensus_percentage, consensus_assignees, consensus_task_type])
-        if is_pulling and is_consensus:
-            raise exceptions.PlatformException(error="400", message="Consensus can not work as a pulling task")
-        if any([is_pulling, is_consensus]):
-            if metadata is None:
-                metadata = {}
-            if "system" not in metadata:
-                metadata["system"] = {}
-            if allowed_assignees is not None or assignee_ids is not None:
-                metadata["system"]["allowedAssignees"] = allowed_assignees if allowed_assignees else assignee_ids
-            if consensus_task_type is not None:
-                metadata["system"]["consensusTaskType"] = consensus_task_type
-            metadata = self._add_task_metadata_params(metadata=metadata, input_value=batch_size, input_name="batchSize")
-            metadata = self._add_task_metadata_params(
-                metadata=metadata, input_value=max_batch_workload, input_name="maxBatchWorkload"
-            )
-            metadata = self._add_task_metadata_params(
-                metadata=metadata, input_value=consensus_percentage, input_name="consensusPercentage"
-            )
-            metadata = self._add_task_metadata_params(
-                metadata=metadata, input_value=consensus_assignees, input_name="consensusAssignees"
-            )
-            metadata = self._add_task_metadata_params(metadata=metadata, input_value=scoring, input_name="scoring")
-
-        if metadata is not None:
-            payload["metadata"] = metadata
-
-        return self._create_task(payload, wait=wait)
-
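The removed create() above keeps the same public signature across this diff, so a minimal usage sketch looks roughly like the following; the project, dataset, and folder names are placeholders, not part of this diff:

    import datetime
    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # placeholder project name
    dataset = project.datasets.get(dataset_name='my-dataset')  # placeholder dataset name
    task = dataset.tasks.create(
        task_name='my-labeling-task',
        due_date=datetime.datetime(day=1, month=1, year=2029).timestamp(),
        assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
        filters=dl.Filters(field='dir', values='/to-annotate'),  # only items under this (assumed) folder
    )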
-    def __item_operations(self, dataset: entities.Dataset, op, task=None, task_id=None, filters=None, items=None):
-
-        if task is None and task_id is None:
-            raise exceptions.PlatformException("400", "Must provide either task or task id")
-        elif task_id is None:
-            task_id = task.id
-
-        try:
-            if filters is None and items is None:
-                raise exceptions.PlatformException("400", "Must provide either filters or items list")
-
-            if filters is None:
-                filters = entities.Filters(
-                    field="id",
-                    values=[item.id for item in items],
-                    operator=entities.FiltersOperations.IN,
-                    use_defaults=False,
-                )
-
-            if op == "delete":
-                if task is None:
-                    task = self.get(task_id=task_id)
-                assignment_ids = task.assignmentIds
-                filters._ref_assignment = True
-                filters._ref_assignment_id = assignment_ids
-
-            filters._ref_task = True
-            filters._ref_task_id = task_id
-            filters._ref_op = op
-            return dataset.items.update(filters=filters)
-        finally:
-            if filters is not None:
-                filters._nullify_refs()
-
-    @_api_reference.add(path="/annotationtasks/{id}/addToTask", method="post")
-    def add_items(
-        self,
-        task: entities.Task = None,
-        task_id=None,
-        filters: entities.Filters = None,
-        items=None,
-        assignee_ids=None,
-        query=None,
-        workload=None,
-        limit=None,
-        wait=True,
-    ) -> entities.Task:
-        """
-        Add items to a Task.
-
-        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
-
-        :param dtlpy.entities.task.Task task: task object
-        :param str task_id: the Id of the task
-        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-        :param list items: list of items (item Ids or objects) to add to the task
-        :param list assignee_ids: list to assignee who works in the task
-        :param dict query: query to filter the items for the task
-        :param list workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
-        :param int limit: the limit items that task can include
-        :param bool wait: wait until add items will to finish
-        :return: task entity
-        :rtype: dtlpy.entities.task.Task
-
-        **Example**:
-
-        .. code-block:: python
-
-            dataset.tasks.add_items(task= 'task_entity',
-                items = [items])
-        """
-        if filters is None and items is None and query is None:
-            raise exceptions.PlatformException("400", "Must provide either filters, query or items list")
-
-        if task is None and task_id is None:
-            raise exceptions.PlatformException("400", "Must provide either task or task_id")
-
-        if query is None:
-            if filters is None:
-                if not isinstance(items, list):
-                    items = [items]
-                filters = entities.Filters(
-                    field="id",
-                    values=[item.id for item in items],
-                    operator=entities.FiltersOperations.IN,
-                    use_defaults=False,
-                )
-            query = filters.prepare()
-
-        if workload is None and assignee_ids is not None:
-            workload = entities.Workload.generate(assignee_ids=assignee_ids)
-
-        if task_id is None:
-            task_id = task.id
-
-        payload = {"query": "{}".format(json.dumps(query).replace("'", '"'))}
-
-        if workload is not None:
-            payload["workload"] = workload.to_json()
-
-        if limit is not None:
-            payload["limit"] = limit
-
-        payload["asynced"] = wait
-
-        url = "{}/{}/addToTask".format(URL_PATH, task_id)
-
-        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)
-
-        if success:
-            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
-            if not wait:
-                return command
-            backoff_factor = 2
-            if command.type == "BulkAddToTaskSetting":
-                backoff_factor = 8
-            command = command.wait(timeout=0, backoff_factor=backoff_factor)
-            if task is None:
-                task = self.get(task_id=task_id)
-            if "addToTaskPayload" not in command.spec:
-                raise exceptions.PlatformException(
-                    error="400", message="addToTaskPayload key is missing in command response: {}".format(response)
-                )
-        else:
-            raise exceptions.PlatformException(response)
-
-        assert isinstance(task, entities.Task)
-        return task
-
-    # @_api_reference.add(path='/annotationtasks/{id}/removeFromTask', method='post')
-    def remove_items(
-        self,
-        task: entities.Task = None,
-        task_id=None,
-        filters: entities.Filters = None,
-        query=None,
-        items=None,
-        wait=True,
-    ):
-        """
-        remove items from Task.
-
-        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
-
-        :param dtlpy.entities.task.Task task: task object
-        :param str task_id: the Id of the task
-        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-        :param dict query: query to filter the items use it
-        :param list items: list of items to add to the task
-        :param bool wait: wait until remove items finish
-        :return: True if success and an error if failed
-        :rtype: bool
-
-        **Examples**:
-
-        .. code-block:: python
-
-            dataset.tasks.remove_items(task= 'task_entity',
-                items = [items])
-
-        """
-        if filters is None and items is None and query is None:
-            raise exceptions.PlatformException("400", "Must provide either filters, query or items list")
-
-        if task is None and task_id is None:
-            raise exceptions.PlatformException("400", "Must provide either task or task_id")
-
-        if query is None:
-            if filters is None:
-                if not isinstance(items, list):
-                    items = [items]
-                filters = entities.Filters(
-                    field="id",
-                    values=[item.id for item in items],
-                    operator=entities.FiltersOperations.IN,
-                    use_defaults=False,
-                )
-            query = filters.prepare()
-
-        if task_id is None:
-            task_id = task.id
-
-        payload = {"query": "{}".format(json.dumps(query).replace("'", '"')), "asynced": wait}
-
-        url = "{}/{}/removeFromTask".format(URL_PATH, task_id)
-
-        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)
-
-        if success:
-            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
-            if not wait:
-                return command
-            command = command.wait(timeout=0)
-
-            if "removeFromTaskId" not in command.spec:
-                raise exceptions.PlatformException(
-                    error="400", message="removeFromTaskId key is missing in command response: {}".format(response)
-                )
-        else:
-            raise exceptions.PlatformException(response)
-        return True
-
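add_items() and remove_items() above share the same item-selection options (filters, an items list, or a raw query); a small sketch, assuming a task and an item already exist and using a placeholder file path:

    task = dataset.tasks.get(task_name='my-labeling-task')       # assumed to exist
    item = dataset.items.get(filepath='/to-annotate/image.jpg')  # placeholder path
    dataset.tasks.add_items(task=task, items=[item])
    dataset.tasks.remove_items(task=task, items=[item])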
-    def get_items(
-        self,
-        task_id: str = None,
-        task_name: str = None,
-        dataset: entities.Dataset = None,
-        filters: entities.Filters = None,
-        get_consensus_items: bool = False,
-        task: entities.Task = None,
-    ) -> entities.PagedEntities:
-        """
-        Get the task items to use in your code.
-
-        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
-
-        If a filters param is provided, you will receive a PagedEntity output of the task items. If no filter is provided, you will receive a list of the items.
-
-        :param str task_id: the id of the task
-        :param str task_name: the name of the task
-        :param bool get_consensus_items: get the items from the consensus assignment
-        :param dtlpy.entities.Task task: task object
-        :param dtlpy.entities.dataset.Dataset dataset: dataset object that refer to the task
-        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
-        :return: list of the items or PagedEntity output of items
-        :rtype: list or dtlpy.entities.paged_entities.PagedEntities
-
-        **Example**:
-
-        .. code-block:: python
-
-            dataset.tasks.get_items(task_id= 'task_id')
-        """
-        if task is None and task_id is None and task_name is None:
-            raise exceptions.PlatformException("400", "Please provide either task_id or task_name")
-
-        if task_id is None:
-            if task is None:
-                task = self.get(task_name=task_name)
-            task_id = task.id
-
-        if dataset is None and self._dataset is None:
-            raise exceptions.PlatformException("400", "Please provide a dataset entity")
-        if dataset is None:
-            dataset = self._dataset
-
-        if filters is None:
-            filters = entities.Filters(use_defaults=False)
-        filters.add(field="metadata.system.refs.id", values=[task_id], operator=entities.FiltersOperations.IN)
-
-        if not get_consensus_items:
-            if task is None:
-                task = self.get(task_id=task_id)
-            if task.metadata.get("system", dict()).get("consensusAssignmentId", None):
-                filters.add(
-                    field="metadata.system.refs.id",
-                    values=task.metadata["system"]["consensusAssignmentId"],
-                    operator=entities.FiltersOperations.NOT_EQUAL,
-                )
-
-        return dataset.items.list(filters=filters)
-
-    def set_status(self, status: str, operation: str, task_id: str, item_ids: List[str]):
-        """
-        Update an item status within a task.
-
-        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
-
-        :param str status: string the describes the status
-        :param str operation: the status action need 'create' or 'delete'
-        :param str task_id: the Id of the task
-        :param list item_ids: List[str] id items ids
-        :return: True if success
-        :rtype: bool
-
-        **Example**:
-
-        .. code-block:: python
-
-            dataset.tasks.set_status(task_id= 'task_id', status='complete', operation='create')
-        """
-        url = "/assignments/items/tasks/{task_id}/status".format(task_id=task_id)
-        payload = {
-            "itemIds": item_ids,
-            "statusPayload": {"operation": operation, "returnLastStatus": True, "status": status},
-        }
-
-        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)
-
-        if not success:
-            raise exceptions.PlatformException(response)
-        if response.json() is not None:
-            updated_items = set(response.json().keys())
-            log_msg = "Items status was updated successfully."
-            if len(updated_items) != len(item_ids):
-                failed_items = set(item_ids).difference(updated_items)
-                log_msg = "{success_count} out of TOTAL items were updated. The following items failed to update: {failed_items}".format(
-                    success_count=len(updated_items), failed_items=failed_items
-                )
-            logger.info(msg=log_msg)
-        return True
-
-    def task_scores(self, task_id: str = None, page_offset: int = 0, page_size: int = 100):
-        """
-        Get all entities scores in a task.
-
-        :param str task_id: the id of the task
-        :param int page_offset: the page offset
-        :param int page_size: the page size
-        :return: page of the task scores
-
-        **Example**:
-
-        .. code-block:: python
-
-            dataset.tasks.task_scores(task_id= 'task_id')
-        """
-        if task_id is None:
-            raise exceptions.PlatformException("400", "Please provide task_id")
-
-        url = "/scores/tasks/{task_id}?page={page_offset}&pageSize={page_size}".format(
-            task_id=task_id, page_offset=page_offset, page_size=page_size
-        )
-        success, response = self._client_api.gen_request(req_type="get", path=url)
-
-        if success:
-            return response.json()
-        else:
-            raise exceptions.PlatformException(response)
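get_items(), set_status() and task_scores() above work against an existing task; a brief sketch under the assumption that the task from the earlier examples exists (the 'completed' status is one of the default actions named in the create() docstring):

    task = dataset.tasks.get(task_name='my-labeling-task')
    pages = dataset.tasks.get_items(task_id=task.id)
    item_ids = [item.id for page in pages for item in page]  # flatten the paged result
    dataset.tasks.set_status(task_id=task.id, item_ids=item_ids, status='completed', operation='create')
    scores = dataset.tasks.task_scores(task_id=task.id)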
+import datetime
+import logging
+import json
+from typing import Union, List
+import warnings
+
+from .. import exceptions, miscellaneous, entities, repositories, _api_reference
+from ..services.api_client import ApiClient
+
+logger = logging.getLogger(name="dtlpy")
+URL_PATH = "/annotationtasks"
+
+
+class Tasks:
+    """
+    Tasks Repository
+
+    The Tasks class allows the user to manage tasks and their properties.
+    For more information, read in our developers' documentation about `Creating Tasks <https://developers.dataloop.ai/tutorials/task_workflows/create_a_task/chapter/>`_, and `Redistributing and Reassigning Tasks <https://developers.dataloop.ai/tutorials/task_workflows/redistributing_and_reassigning_a_task/chapter/>`_.
+    """
+
+    def __init__(
+        self,
+        client_api: ApiClient,
+        project: entities.Project = None,
+        dataset: entities.Dataset = None,
+        project_id: str = None,
+    ):
+        self._client_api = client_api
+        self._project = project
+        self._dataset = dataset
+        self._assignments = None
+        self._project_id = project_id
+
+    ############
+    # entities #
+    ############
+    @property
+    def project(self) -> entities.Project:
+        if self._project is None and self._project_id is None:
+            if self._dataset is None:
+                raise exceptions.PlatformException(
+                    error="2001",
+                    message='Missing "project". need to set a Project entity or use project.tasks repository',
+                )
+            else:
+                self._project = self._dataset.project
+                self._project_id = self._project.id
+        if self._project is None and self._project_id is not None:
+            self._project = self._client_api.projects.get(project_id=self._project_id)
+        return self._project
+
+    @project.setter
+    def project(self, project: entities.Project):
+        if not isinstance(project, entities.Project):
+            raise ValueError("Must input a valid Project entity")
+        self._project = project
+
+    @property
+    def dataset(self) -> entities.Dataset:
+        if self._dataset is None:
+            raise exceptions.PlatformException(
+                error="2001", message='Missing "dataset". need to set a Dataset entity or use dataset.tasks repository'
+            )
+        assert isinstance(self._dataset, entities.Dataset)
+        return self._dataset
+
+    @dataset.setter
+    def dataset(self, dataset: entities.Dataset):
+        if not isinstance(dataset, entities.Dataset):
+            raise ValueError("Must input a valid Dataset entity")
+        self._dataset = dataset
+
+    @property
+    def assignments(self) -> repositories.Assignments:
+        if self._assignments is None:
+            self._assignments = repositories.Assignments(client_api=self._client_api, project=self._project)
+        assert isinstance(self._assignments, repositories.Assignments)
+        return self._assignments
+
+    def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Task]:
+        pool = self._client_api.thread_pools(pool_name="entity.create")
+        jobs = [None for _ in range(len(response_items))]
+
+        for i_task, task in enumerate(response_items):
+            jobs[i_task] = pool.submit(
+                entities.Task._protected_from_json,
+                **{"client_api": self._client_api, "_json": task, "project": self._project, "dataset": self._dataset},
+            )
+
+        # get all results
+        results = [j.result() for j in jobs]
+        # log errors
+        _ = [logger.warning(r[1]) for r in results if r[0] is False]
+        # return good jobs
+        tasks = miscellaneous.List([r[1] for r in results if r[0] is True])
+        return tasks
+
+    def _list(self, filters: entities.Filters):
+        url = "{}/query".format(URL_PATH)
+        query = filters.prepare()
+        query["context"] = dict(projectIds=[self._project_id])
+        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=filters.prepare())
+
+        if not success:
+            raise exceptions.PlatformException(response)
+        return response.json()
+
+    def query(self, filters=None, project_ids=None):
+        """
+        List all tasks by filter.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
+
+        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
+        :param list project_ids: list of project ids of the required tasks
+        :return: Paged entity - task pages generator
+        :rtype: dtlpy.entities.paged_entities.PagedEntities
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.query(project_ids='project_ids')
+        """
+        if project_ids is None:
+            if self._project_id is not None:
+                project_ids = self._project_id
+            else:
+                project_ids = self.project.id
+
+        if not isinstance(project_ids, list):
+            project_ids = [project_ids]
+
+        if filters is None:
+            filters = entities.Filters(resource=entities.FiltersResource.TASK)
+        else:
+            if not isinstance(filters, entities.Filters):
+                raise exceptions.PlatformException("400", "Unknown filters type")
+            if filters.resource != entities.FiltersResource.TASK:
+                raise exceptions.PlatformException("400", "Filter resource must be task")
+
+        if filters.context is None:
+            filters.context = {"projectIds": project_ids}
+
+        if self._project_id is not None:
+            filters.add(field="projectId", values=self._project_id)
+
+        if self._dataset is not None:
+            filters.add(field="datasetId", values=self._dataset.id)
+
+        paged = entities.PagedEntities(
+            items_repository=self,
+            filters=filters,
+            page_offset=filters.page,
+            page_size=filters.page_size,
+            project_id=self._project_id,
+            client_api=self._client_api,
+        )
+        paged.get_page()
+        return paged
+
+    ###########
+    # methods #
+    ###########
+    @_api_reference.add(path="/annotationtasks/query", method="post")
+    def list(
+        self,
+        project_ids=None,
+        status=None,
+        task_name=None,
+        pages_size=None,
+        page_offset=None,
+        recipe=None,
+        creator=None,
+        assignments=None,
+        min_date=None,
+        max_date=None,
+        filters: entities.Filters = None,
+    ) -> Union[miscellaneous.List[entities.Task], entities.PagedEntities]:
+        """
+        List all tasks.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
+
+        :param project_ids: search tasks by given list of project ids
+        :param str status: search tasks by a given task status
+        :param str task_name: search tasks by a given task name
+        :param int pages_size: pages size of the output generator
+        :param int page_offset: page offset of the output generator
+        :param dtlpy.entities.recipe.Recipe recipe: Search tasks that use a given recipe. Provide the required recipe object
+        :param str creator: search tasks created by a given creator (user email)
+        :param dtlpy.entities.assignment.Assignment recipe assignments: assignments object
+        :param double min_date: search all tasks created AFTER a given date, use a milliseconds format. For example: 1661780622008
+        :param double max_date: search all tasks created BEFORE a given date, use a milliseconds format. For example: 1661780622008
+        :param dtlpy.entities.filters.Filters filters: dl.Filters entity to filters tasks using DQL
+        :return: List of Task objects
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.list(project_ids='project_ids',pages_size=100, page_offset=0)
+        """
+        # url
+        url = URL_PATH + "/query"
+
+        if filters is None:
+            filters = entities.Filters(use_defaults=False, resource=entities.FiltersResource.TASK)
+        else:
+            return self.query(filters=filters, project_ids=project_ids)
+
+        if self._dataset is not None:
+            filters.add(field="datasetId", values=self._dataset.id)
+
+        if project_ids is not None:
+            if not isinstance(project_ids, list):
+                project_ids = [project_ids]
+        elif self._project_id is not None:
+            project_ids = [self._project_id]
+        else:
+            project_ids = [self.project.id]
+        filters.context = {"projectIds": project_ids}
+
+        if assignments is not None:
+            if not isinstance(assignments, list):
+                assignments = [assignments]
+            assignments = [
+                assignments_entity.id if isinstance(assignments_entity, entities.Assignment) else assignments_entity
+                for assignments_entity in assignments
+            ]
+            filters.add(field="assignmentIds", values=assignments, operator=entities.FiltersOperations.IN)
+        if status is not None:
+            filters.add(field="status", values=status)
+        if task_name is not None:
+            filters.add(field="name", values=task_name)
+        if pages_size is not None:
+            filters.page_size = pages_size
+        if pages_size is None:
+            filters.page_size = 500
+        if page_offset is not None:
+            filters.page = page_offset
+        if recipe is not None:
+            if not isinstance(recipe, list):
+                recipe = [recipe]
+            recipe = [
+                recipe_entity.id if isinstance(recipe_entity, entities.Recipe) else recipe_entity
+                for recipe_entity in recipe
+            ]
+            filters.add(field="recipeId", values=recipe, operator=entities.FiltersOperations.IN)
+        if creator is not None:
+            filters.add(field="creator", values=creator)
+        if min_date is not None:
+            filters.add(field="dueDate", values=min_date, operator=entities.FiltersOperations.GREATER_THAN)
+        if max_date is not None:
+            filters.add(field="dueDate", values=max_date, operator=entities.FiltersOperations.LESS_THAN)
+
+        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=filters.prepare())
+        if success:
+            tasks = miscellaneous.List(
+                [
+                    entities.Task.from_json(
+                        client_api=self._client_api, _json=_json, project=self._project, dataset=self._dataset
+                    )
+                    for _json in response.json()["items"]
+                ]
+            )
+        else:
+            logger.error("Platform error getting annotation task")
+            raise exceptions.PlatformException(response)
+
+        return tasks
+
+    @_api_reference.add(path="/annotationtasks/{id}", method="get")
+    def get(self, task_name=None, task_id=None) -> entities.Task:
+        """
+        Get a Task object to use in your code.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
+
+        :param str task_name: optional - search by name
+        :param str task_id: optional - search by id
+        :return: task object
+        :rtype: dtlpy.entities.task.Task
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.get(task_id='task_id')
+        """
+
+        # url
+        url = URL_PATH
+
+        if task_id is not None:
+            url = "{}/{}".format(url, task_id)
+            success, response = self._client_api.gen_request(req_type="get", path=url)
+            if not success:
+                raise exceptions.PlatformException(response)
+            else:
+                task = entities.Task.from_json(
+                    _json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
+                )
+            # verify input task name is same as the given id
+            if task_name is not None and task.name != task_name:
+                logger.warning(
+                    "Mismatch found in tasks.get: task_name is different then task.name:"
+                    " {!r} != {!r}".format(task_name, task.name)
+                )
+        elif task_name is not None:
+            tasks = self.list(
+                filters=entities.Filters(field="name", values=task_name, resource=entities.FiltersResource.TASK)
+            )
+            if tasks.items_count == 0:
+                raise exceptions.PlatformException("404", "Annotation task not found")
+            elif tasks.items_count > 1:
+                raise exceptions.PlatformException(
+                    "404", f"More than one Annotation task exist with the same name: {task_name}"
+                )
+            else:
+                task = tasks[0][0]
+        else:
+            raise exceptions.PlatformException("400", "Must provide either Annotation task name or Annotation task id")
+
+        assert isinstance(task, entities.Task)
+        return task
+
+    @property
+    def platform_url(self):
+        return self._client_api._get_resource_url(f"projects/{self.project.id}/tasks")
+
+    def open_in_web(self, task_name: str = None, task_id: str = None, task: entities.Task = None):
+        """
+        Open the task in the web platform.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who has been assigned the task.
+
+        :param str task_name: the name of the task
+        :param str task_id: the Id of the task
+        :param dtlpy.entities.task.Task task: the task object
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.open_in_web(task_id='task_id')
+        """
+        if task_name is not None:
+            task = self.get(task_name=task_name)
+        if task is not None:
+            task.open_in_web()
+        elif task_id is not None:
+            self._client_api._open_in_web(url=self.platform_url + "/" + str(task_id))
+        else:
+            self._client_api._open_in_web(url=self.platform_url)
+
+    @_api_reference.add(path="/annotationtasks/{id}", method="delete")
+    def delete(self, task: entities.Task = None, task_name: str = None, task_id: str = None, wait: bool = True):
+        """
+        Delete the Task.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.
+
+        :param dtlpy.entities.task.Task task: the task object
+        :param str task_name: the name of the task
+        :param str task_id: the Id of the task
+        :param bool wait: wait until delete task finish
+        :return: True is success
+        :rtype: bool
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.delete(task_id='task_id')
+        """
+        if task_id is None:
+            if task is None:
+                if task_name is None:
+                    raise exceptions.PlatformException(
+                        "400", "Must provide either annotation task, " "annotation task name or annotation task id"
+                    )
+                else:
+                    task = self.get(task_name=task_name)
+            task_id = task.id
+
+        url = URL_PATH
+        url = f"{url}/{task_id}"
+        success, response = self._client_api.gen_request(req_type="delete", path=url, json_req={"asynced": wait})
+
+        if not success:
+            raise exceptions.PlatformException(response)
+        response_json = response.json()
+        command = entities.Command.from_json(_json=response_json, client_api=self._client_api)
+        if not wait:
+            return command
+        command = command.wait(timeout=0)
+        if "deleteTaskId" not in command.spec:
+            raise exceptions.PlatformException(
+                error="400", message="deleteTaskId key is missing in command response: {}".format(response)
+            )
+        return True
+
+    @_api_reference.add(path="/annotationtasks/{id}", method="patch")
+    def update(self, task: entities.Task = None, system_metadata=False) -> entities.Task:
+        """
+        Update a Task.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.
+
+        :param dtlpy.entities.task.Task task: the task object
+        :param bool system_metadata: DEPRECATED
+        :return: Task object
+        :rtype: dtlpy.entities.task.Task
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.update(task='task_entity')
+        """
+        url = URL_PATH
+        url = f"{url}/{task.id}"
+
+        if system_metadata:
+            warnings.warn(
+                "Task system metadata updates are not permitted. Please store custom metadata in 'task.metadata['user']' instead.",
+                DeprecationWarning,
+            )
+
+        success, response = self._client_api.gen_request(req_type="patch", path=url, json_req=task.to_json())
+        if success:
+            return entities.Task.from_json(
+                _json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
+            )
+        else:
+            raise exceptions.PlatformException(response)
+
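The listing, lookup, and update helpers above can be combined in a few lines; a sketch, reusing the project and task names assumed in the earlier examples (the 'user' metadata key follows the deprecation note in update()):

    tasks_page = project.tasks.list()                        # paged list of Task entities
    task = project.tasks.get(task_name='my-labeling-task')
    task.metadata.setdefault('user', {})['reviewed'] = True  # custom metadata belongs under 'user'
    task = project.tasks.update(task=task)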
+    def create_qa_task(self,
+                       task: entities.Task,
+                       assignee_ids,
+                       due_date=None,
+                       filters=None,
+                       items=None,
+                       query=None,
+                       workload=None,
+                       metadata=None,
+                       available_actions=None,
+                       wait=True,
+                       batch_size=None,
+                       max_batch_workload=None,
+                       allowed_assignees=None,
+                       priority=entities.TaskPriority.MEDIUM
+                       ) -> entities.Task:
+        """
+        Create a new QA Task.
+
+        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
+
+        :param dtlpy.entities.task.Task task: the parent annotation task object
+        :param list assignee_ids: list the QA task assignees (contributors) that should be working on the task. Provide a list of users' emails
+        :param float due_date: date by which the QA task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
+        :param entities.Filters filters: dl.Filters entity to filter items for the task
+        :param List[entities.Item] items: list of items (item Id or objects) to insert to the task
+        :param dict DQL query: filter items for the task
+        :param List[WorkloadUnit] workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
+        :param dict metadata: metadata for the task
+        :param list available_actions: list of available actions (statuses) that will be available for the task items; The default statuses are: "approved" and "discard"
+        :param bool wait: wait until create task finish
+        :param int batch_size: Pulling batch size (items), use with pulling allocation method. Restrictions - Min 3, max 100
+        :param int max_batch_workload: Max items in assignment, use with pulling allocation method. Restrictions - Min batchSize + 2, max batchSize * 2
+        :param list allowed_assignees: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
+        :param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
+        :return: task object
+        :rtype: dtlpy.entities.task.Task
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset.tasks.create_qa_task(task= 'task_entity',
+                due_date = datetime.datetime(day= 1, month= 1, year= 2029).timestamp(),
+                assignee_ids =[ 'annotator1@dataloop.ai', 'annotator2@dataloop.ai'])
+        """
+        source_filter = entities.filters.SingleFilter(
+            field='metadata.system.refs',
+            values={
+                "id": task.id,
+                "type": "task",
+                "metadata":
+                    {
+                        "status":
+                            {
+                                "$exists": True
+                            }
+                    }
+            },
+            operator=entities.FiltersOperations.MATCH
+        )
+
+        if query is not None:
+            and_list = query.get('filter', query).get('$and', None)
+            if and_list is not None:
+                and_list.append(source_filter.prepare())
+            else:
+                if 'filter' not in query:
+                    query['filter'] = {}
+                query['filter']['$and'] = [source_filter.prepare()]
+
+        else:
+            if filters is None and items is None:
+                filters = entities.Filters()
+            if filters:
+                filters.and_filter_list.append(source_filter)
+
+        return self.create(task_name='{}_qa'.format(task.name),
+                           task_type='qa',
+                           task_parent_id=task.id,
+                           assignee_ids=assignee_ids,
+                           workload=workload,
+                           task_owner=task.creator,
+                           project_id=task.project_id,
+                           recipe_id=task.recipe_id,
+                           due_date=due_date,
+                           filters=filters,
+                           items=items,
+                           query=query,
+                           metadata=metadata,
+                           available_actions=available_actions,
+                           wait=wait,
+                           batch_size=batch_size,
+                           max_batch_workload=max_batch_workload,
+                           allowed_assignees=allowed_assignees,
+                           priority=priority
+                           )
+
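create_qa_task() above wraps create() with task_type='qa' and the parent task id; a minimal sketch, reusing the annotation task assumed earlier and a placeholder reviewer email:

    annotation_task = dataset.tasks.get(task_name='my-labeling-task')
    qa_task = dataset.tasks.create_qa_task(
        task=annotation_task,
        assignee_ids=['reviewer@dataloop.ai'],  # placeholder reviewer email
        due_date=datetime.datetime(day=1, month=2, year=2029).timestamp(),
    )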
538
|
+
def create_honeypot_task(
|
|
539
|
+
self,
|
|
540
|
+
name: str,
|
|
541
|
+
dataset: entities.Dataset = None,
|
|
542
|
+
due_date: float = None,
|
|
543
|
+
filters: entities.Filters = None,
|
|
544
|
+
owner: str = None,
|
|
545
|
+
recipe_id: str = None,
|
|
546
|
+
assignee_ids: List[str] = None,
|
|
547
|
+
workload=None,
|
|
548
|
+
available_actions=None,
|
|
549
|
+
priority=entities.TaskPriority.MEDIUM,
|
|
550
|
+
consensus_percentage=None,
|
|
551
|
+
consensus_assignees=None,
|
|
552
|
+
scoring=True,
|
|
553
|
+
limit=None,
|
|
554
|
+
wait=True,
|
|
555
|
+
enforce_video_conversion=True,
|
|
556
|
+
) -> entities.Task:
|
|
557
|
+
"""
|
|
558
|
+
Create a new Consensus Task.
|
|
559
|
+
|
|
560
|
+
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
561
|
+
|
|
562
|
+
:param str name: the name of the task
|
|
563
|
+
:param entities.Dataset dataset: dataset object, the dataset that refer to the task
|
|
564
|
+
:param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
565
|
+
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
566
|
+
:param str owner: task owner. Provide user email
|
|
567
|
+
:param str recipe_id: recipe id for the task
|
|
568
|
+
:param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
569
|
+
:param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
|
|
570
|
+
:param list available_actions: list of available actions (statuses) that will be available for the task items
|
|
571
|
+
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
572
|
+
:param str consensus_task_type: consensus task type - "consensus", "qualification", or "honeypot"
|
|
573
|
+
:param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
|
|
574
|
+
:param int consensus_assignees: the number of different annotators per item (number of copies per item)
|
|
575
|
+
:param bool scoring: create a scoring app in project
|
|
576
|
+
:param int limit: the limit items that the task can include
|
|
577
|
+
:param bool wait: wait until create task finish
|
|
578
|
+
:param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations
|
|
579
|
+
:return: Task object
|
|
580
|
+
:rtype: dtlpy.entities.task.Task
|
|
581
|
+
|
|
582
|
+
**Example**:
|
|
583
|
+
|
|
584
|
+
.. code-block:: python
|
|
585
|
+
|
|
586
|
+
# Create a consensus task
|
|
587
|
+
dataset.tasks.create_consensus_task(name='my_consensus_task',
|
|
588
|
+
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
|
|
589
|
+
consensus_percentage=66,
|
|
590
|
+
consensus_assignees=2)
|
|
591
|
+
"""
|
|
592
|
+
return self.create_consensus_task(
|
|
593
|
+
name=name,
|
|
594
|
+
dataset=dataset,
|
|
595
|
+
due_date=due_date,
|
|
596
|
+
filters=filters,
|
|
597
|
+
owner=owner,
|
|
598
|
+
recipe_id=recipe_id,
|
|
599
|
+
assignee_ids=assignee_ids,
|
|
600
|
+
workload=workload,
|
|
601
|
+
available_actions=available_actions,
|
|
602
|
+
priority=priority,
|
|
603
|
+
consensus_task_type=entities.ConsensusTaskType.HONEYPOT,
|
|
604
|
+
consensus_percentage=consensus_percentage,
|
|
605
|
+
consensus_assignees=consensus_assignees,
|
|
606
|
+
scoring=scoring,
|
|
607
|
+
limit=limit,
|
|
608
|
+
wait=wait,
|
|
609
|
+
enforce_video_conversion=enforce_video_conversion,
|
|
610
|
+
)
|
|
611
|
+
|
|
612
|
+
def create_qualification_task(
|
|
613
|
+
self,
|
|
614
|
+
name: str,
|
|
615
|
+
dataset: entities.Dataset = None,
|
|
616
|
+
due_date: float = None,
|
|
617
|
+
filters: entities.Filters = None,
|
|
618
|
+
owner: str = None,
|
|
619
|
+
recipe_id: str = None,
|
|
620
|
+
assignee_ids: List[str] = None,
|
|
621
|
+
workload=None,
|
|
622
|
+
available_actions=None,
|
|
623
|
+
priority=entities.TaskPriority.MEDIUM,
|
|
624
|
+
consensus_percentage=None,
|
|
625
|
+
consensus_assignees=None,
|
|
626
|
+
limit=None,
|
|
627
|
+
wait=True,
|
|
628
|
+
enforce_video_conversion=True,
|
|
629
|
+
) -> entities.Task:
|
|
630
|
+
"""
|
|
631
|
+
Create a new Qualification Task.
|
|
632
|
+
"""
|
|
633
|
+
return self.create_consensus_task(
|
|
634
|
+
name=name,
|
|
635
|
+
dataset=dataset,
|
|
636
|
+
due_date=due_date,
|
|
637
|
+
filters=filters,
|
|
638
|
+
owner=owner,
|
|
639
|
+
recipe_id=recipe_id,
|
|
640
|
+
assignee_ids=assignee_ids,
|
|
641
|
+
workload=workload,
|
|
642
|
+
available_actions=available_actions,
|
|
643
|
+
priority=priority,
|
|
644
|
+
consensus_task_type=entities.ConsensusTaskType.QUALIFICATION,
|
|
645
|
+
consensus_percentage=consensus_percentage,
|
|
646
|
+
consensus_assignees=consensus_assignees,
|
|
647
|
+
scoring=True,
|
|
648
|
+
limit=limit,
|
|
649
|
+
wait=wait,
|
|
650
|
+
enforce_video_conversion=enforce_video_conversion,
|
|
651
|
+
)
|
|
652
|
+
|
|
653
|
+
def create_consensus_task(
|
|
654
|
+
self,
|
|
655
|
+
name: str,
|
|
656
|
+
dataset: entities.Dataset = None,
|
|
657
|
+
due_date: float = None,
|
|
658
|
+
filters: entities.Filters = None,
|
|
659
|
+
owner: str = None,
|
|
660
|
+
recipe_id: str = None,
|
|
661
|
+
assignee_ids: List[str] = None,
|
|
662
|
+
workload=None,
|
|
663
|
+
available_actions=None,
|
|
664
|
+
priority=entities.TaskPriority.MEDIUM,
|
|
665
|
+
metadata=None,
|
|
666
|
+
consensus_task_type: entities.ConsensusTaskType = entities.ConsensusTaskType.CONSENSUS,
|
|
667
|
+
consensus_percentage=None,
|
|
668
|
+
consensus_assignees=None,
|
|
669
|
+
scoring=True,
|
|
670
|
+
limit=None,
|
|
671
|
+
wait=True,
|
|
672
|
+
enforce_video_conversion=True,
|
|
673
|
+
) -> entities.Task:
|
|
674
|
+
"""
|
|
675
|
+
Create a new Consensus Task.
|
|
676
|
+
|
|
677
|
+
**Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.
|
|
678
|
+
|
|
679
|
+
:param str name: the name of the task
|
|
680
|
+
:param entities.Dataset dataset: dataset object, the dataset that refer to the task
|
|
681
|
+
:param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
|
|
682
|
+
:param entities.Filters filters: dl.Filters entity to filter items for the task
|
|
683
|
+
:param str owner: task owner. Provide user email
|
|
684
|
+
:param str recipe_id: recipe id for the task
|
|
685
|
+
:param list assignee_ids: list the task assignees (contributors) that should be working on the task. Provide a list of users' emails
|
|
686
|
+
:param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
|
|
687
|
+
:param list available_actions: list of available actions (statuses) that will be available for the task items
|
|
688
|
+
:param entities.TaskPriority priority: priority of the task options in entities.TaskPriority
|
|
689
|
+
:param dict metadata: metadata for the task
|
|
690
|
+
:param str consensus_task_type: consensus task type - "consensus", "qualification", or "honeypot"
|
|
691
|
+
:param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
|
|
692
|
+
:param int consensus_assignees: the number of different annotators per item (number of copies per item)
|
|
693
|
+
:param bool scoring: create a scoring app in project
|
|
694
|
+
:param int limit: the limit items that the task can include
|
|
695
|
+
:param bool wait: wait until create task finish
|
|
696
|
+
:param bool enforce_video_conversion: Enforce WEBM conversion on video items for frame-accurate annotations
|
|
697
|
+
:return: Task object
|
|
698
|
+
:rtype: dtlpy.entities.task.Task
|
|
699
|
+
|
|
700
|
+
**Example**:
|
|
701
|
+
|
|
702
|
+
.. code-block:: python
|
|
703
|
+
|
|
704
|
+
# Create a consensus task
|
|
705
|
+
dataset.tasks.create_consensus_task(name='my_consensus_task',
|
|
706
|
+
assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
|
|
707
|
+
consensus_percentage=66,
|
|
708
|
+
consensus_assignees=2)
|
|
709
|
+
"""
|
|
710
|
+

        if dataset is None:
            dataset = self.dataset

        if due_date is None:
            due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()

        if filters is None:
            filters = entities.Filters()

        if owner is None:
            owner = self._client_api.info()["user_email"]

        if recipe_id is None:
            recipe_id = dataset.get_recipe_ids()[0]

        if workload is None and assignee_ids is not None:
            workload = entities.Workload.generate(assignee_ids=assignee_ids)

        # Handle metadata for consensus tasks
        if metadata is None:
            metadata = {}
        if "system" not in metadata:
            metadata["system"] = {}
        if assignee_ids is not None:
            metadata["system"]["allowedAssignees"] = assignee_ids
        if consensus_task_type is not None:
            metadata["system"]["consensusTaskType"] = consensus_task_type
        metadata = self._add_task_metadata_params(
            metadata=metadata, input_value=consensus_percentage, input_name="consensusPercentage"
        )
        metadata = self._add_task_metadata_params(
            metadata=metadata, input_value=consensus_assignees, input_name="consensusAssignees"
        )
        metadata = self._add_task_metadata_params(metadata=metadata, input_value=scoring, input_name="scoring")

        # Create payload for consensus task
        payload = {
            "name": name,
            "query": "{}".format(json.dumps(filters.prepare()).replace("'", '"')),
            "taskOwner": owner,
            "spec": {"type": "annotation"},
            "datasetId": dataset.id,
            "projectId": self.project.id,
            "assignmentIds": [],
            "recipeId": recipe_id,
            "dueDate": due_date * 1000,
            "asynced": wait,
            "priority": priority,
            "percentage": True,
        }

        # Add workload if provided
        if workload:
            payload["workload"] = workload.to_json()

        # Add limit if provided
        if limit:
            payload["limit"] = limit

        # Add available actions if provided
        if available_actions is not None:
            payload["availableActions"] = [action.to_json() for action in available_actions]

        # Handle video conversion
        if not enforce_video_conversion:
            payload["disableWebm"] = not enforce_video_conversion

        # Handle metadata for consensus tasks
        if metadata is not None:
            payload["metadata"] = metadata

        return self._create_task(payload, wait=wait)

    def _add_task_metadata_params(self, metadata, input_value, input_name):
        if input_value is not None and not isinstance(input_value, int):
            raise exceptions.PlatformException(error="400", message=f"{input_name} must be a number")
        if input_value is not None:
            metadata["system"][input_name] = input_value
        return metadata

    def create_labeling_task(
        self,
        name: str,
        dataset: entities.Dataset = None,
        due_date: float = None,
        filters: entities.Filters = None,
        owner: str = None,
        recipe_id: str = None,
        assignee_ids: List[str] = None,
        workload=None,
        available_actions=None,
        priority=entities.TaskPriority.MEDIUM,
        metadata=None,
        batch_size=None,
        max_batch_workload=None,
        allowed_assignees=None,
        limit=None,
        wait=True,
        enforce_video_conversion=True,
    ) -> entities.Task:
        """
        Create a new Annotation Task (Distribution or Pulling).

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        :param str name: the name of the task
        :param entities.Dataset dataset: dataset object, the dataset that the task refers to
        :param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
        :param entities.Filters filters: dl.Filters entity to filter items for the task
        :param str owner: task owner. Provide a user email
        :param str recipe_id: recipe id for the task
        :param list assignee_ids: list of the task assignees (contributors) that should be working on the task. Provide a list of users' emails
        :param workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees
        :param list available_actions: list of available actions (statuses) that will be available for the task items
        :param entities.TaskPriority priority: priority of the task, options in entities.TaskPriority
        :param dict metadata: metadata for the task
        :param int batch_size: pulling batch size (items), use with the pulling allocation method. Restrictions: min 3, max 100
        :param int max_batch_workload: max items in an assignment, use with the pulling allocation method. Restrictions: min batchSize + 2, max batchSize * 2
        :param list allowed_assignees: list of the task assignees (contributors) that should be working on the task. Provide a list of users' emails
        :param int limit: the maximum number of items the task can include
        :param bool wait: wait until the task creation finishes
        :param bool enforce_video_conversion: enforce WEBM conversion on video items for frame-accurate annotations
        :return: Task object
        :rtype: dtlpy.entities.task.Task

        **Example**:

        .. code-block:: python

            # Create a distribution task
            dataset.tasks.create_labeling_task(name='my_distribution_task',
                                               assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'])

            # Create a pulling task
            dataset.tasks.create_labeling_task(name='my_pulling_task',
                                               assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
                                               batch_size=5,
                                               max_batch_workload=7)
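        A minimal sketch (not from the SDK docs) of an uneven distribution between two annotators
        via ``workload``; the emails and the 70/30 split are placeholders, and ``dl.WorkloadUnit``
        is assumed to take an assignee email and a load percentage as shown in the SDK docstrings:

        .. code-block:: python

            dataset.tasks.create_labeling_task(name='weighted_task',
                                               workload=[dl.WorkloadUnit('senior@dataloop.ai', 70),
                                                         dl.WorkloadUnit('junior@dataloop.ai', 30)])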
"""
|
|
850
|
+
|
|
851
|
+
        if dataset is None:
            dataset = self.dataset

        if due_date is None:
            due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()

        if filters is None:
            filters = entities.Filters()

        if owner is None:
            owner = self._client_api.info()["user_email"]

        if recipe_id is None:
            recipe_id = dataset.get_recipe_ids()[0]

        if workload is None and assignee_ids is not None:
            workload = entities.Workload.generate(assignee_ids=assignee_ids)

        if metadata is None:
            metadata = {}
        if any([batch_size, max_batch_workload]):
            if "system" not in metadata:
                metadata["system"] = {}
            if allowed_assignees is not None or assignee_ids is not None:
                metadata["system"]["allowedAssignees"] = allowed_assignees if allowed_assignees else assignee_ids
            metadata = self._add_task_metadata_params(metadata=metadata, input_value=batch_size, input_name="batchSize")
            metadata = self._add_task_metadata_params(
                metadata=metadata, input_value=max_batch_workload, input_name="maxBatchWorkload"
            )

        # Create payload for annotation task
        payload = {
            "name": name,
            "query": "{}".format(json.dumps(filters.prepare()).replace("'", '"')),
            "taskOwner": owner,
            "spec": {"type": "annotation"},
            "datasetId": dataset.id,
            "projectId": self.project.id,
            "assignmentIds": [],
            "recipeId": recipe_id,
            "dueDate": due_date * 1000,
            "asynced": wait,
            "priority": priority,
        }

        # Add workload if provided
        if workload:
            payload["workload"] = workload.to_json()

        # Add limit if provided
        if limit:
            payload["limit"] = limit

        # Add available actions if provided
        if available_actions is not None:
            payload["availableActions"] = [action.to_json() for action in available_actions]

        # Handle video conversion
        if not enforce_video_conversion:
            payload["disableWebm"] = not enforce_video_conversion

        # Handle metadata for pulling tasks
        if metadata is not None:
            payload["metadata"] = metadata

        return self._create_task(payload, wait=wait)

    def _create_task(self, payload: dict, wait: bool = True) -> entities.Task:
        """
        Private function to create a task from a prepared payload.

        :param dict payload: the prepared payload for task creation
        :param bool wait: whether to wait for task creation to complete
        :return: created Task object
        :rtype: dtlpy.entities.task.Task
        """
        success, response = self._client_api.gen_request(req_type="post", path=URL_PATH, json_req=payload)
        if success:
            response_json = response.json()
            if payload.get("checkIfExist") is not None and "name" in response_json:
                return entities.Task.from_json(
                    _json=response.json(), client_api=self._client_api, project=self._project, dataset=self._dataset
                )

            command = entities.Command.from_json(_json=response_json, client_api=self._client_api)
            if not wait:
                return command
            command = command.wait(timeout=0)
            if "createTaskPayload" not in command.spec:
                raise exceptions.PlatformException(
                    error="400", message="createTaskPayload key is missing in command response: {}".format(response)
                )
            task = self.get(task_id=command.spec["createdTaskId"])
        else:
            raise exceptions.PlatformException(response)

        assert isinstance(task, entities.Task)
        return task

    @_api_reference.add(path="/annotationtasks", method="post")
    def create(
        self,
        task_name,
        due_date=None,
        assignee_ids=None,
        workload=None,
        dataset=None,
        task_owner=None,
        task_type="annotation",
        task_parent_id=None,
        project_id=None,
        recipe_id=None,
        assignments_ids=None,
        metadata=None,
        filters=None,
        items=None,
        query=None,
        available_actions=None,
        wait=True,
        check_if_exist: entities.Filters = False,
        limit=None,
        batch_size=None,
        max_batch_workload=None,
        allowed_assignees=None,
        priority=entities.TaskPriority.MEDIUM,
        consensus_task_type=None,
        consensus_percentage=None,
        consensus_assignees=None,
        scoring=True,
        enforce_video_conversion=True,
    ) -> entities.Task:
        """
        Create a new Task (Annotation or QA).

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        :param str task_name: the name of the task
        :param float due_date: date by which the task should be finished; for example, due_date=datetime.datetime(day=1, month=1, year=2029).timestamp()
        :param list assignee_ids: list of the task assignees (contributors) that should be working on the task. Provide a list of users' emails
        :param List[WorkloadUnit] workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
        :param entities.Dataset dataset: dataset object, the dataset that the task refers to
        :param str task_owner: task owner. Provide a user email
        :param str task_type: task type: "annotation" or "qa"
        :param str task_parent_id: optional if type is qa - parent annotation task id
        :param str project_id: the id of the project where the task will be created
        :param str recipe_id: recipe id for the task
        :param list assignments_ids: assignment ids to attach to the task
        :param dict metadata: metadata for the task
        :param entities.Filters filters: dl.Filters entity to filter items for the task
        :param List[entities.Item] items: list of items (item ids or objects) to insert into the task
        :param dict query: DQL query to filter items for the task
        :param list available_actions: list of available actions (statuses) that will be available for the task items; the default statuses are "completed" and "discard"
        :param bool wait: wait until the task creation finishes
        :param entities.Filters check_if_exist: dl.Filters to check whether a task already exists according to the filter
        :param int limit: the maximum number of items the task can include
        :param int batch_size: pulling batch size (items), use with the pulling allocation method. Restrictions: min 3, max 100
        :param int max_batch_workload: max items in an assignment, use with the pulling allocation method. Restrictions: min batchSize + 2, max batchSize * 2
        :param list allowed_assignees: list of the task assignees (contributors) that should be working on the task. Provide a list of users' emails
        :param entities.TaskPriority priority: priority of the task, options in entities.TaskPriority
        :param entities.ConsensusTaskType consensus_task_type: consensus task type, options in entities.ConsensusTaskType
        :param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
        :param int consensus_assignees: the number of different annotators per item (number of copies per item)
        :param bool scoring: create a scoring app in the project
        :param bool enforce_video_conversion: enforce WEBM conversion on video items for frame-accurate annotations. WEBM conversion is executed as a project service and incurs compute costs; service compute resources can be set according to the planned workload.
        :return: Task object
        :rtype: dtlpy.entities.task.Task

        **Example**:

        .. code-block:: python

            dataset.tasks.create(task_name='my_task',
                                 due_date=datetime.datetime(day=1, month=1, year=2029).timestamp(),
                                 assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
                                 available_actions=[dl.ItemAction("discard"), dl.ItemAction("to-check")])
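        A minimal sketch (not from the SDK docs) of a QA task chained to an existing annotation
        task via ``task_type`` and ``task_parent_id``; ``annotation_task`` is a placeholder for a
        previously created task entity:

        .. code-block:: python

            qa_task = dataset.tasks.create(task_name='my_task_qa',
                                           task_type='qa',
                                           task_parent_id=annotation_task.id,
                                           assignee_ids=['reviewer@dataloop.ai'])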
"""
|
|
1027
|
+
|
|
1028
|
+
        if dataset is None and self._dataset is None:
            raise exceptions.PlatformException("400", "Please provide param dataset")
        if due_date is None:
            due_date = (datetime.datetime.now() + datetime.timedelta(days=7)).timestamp()
        if query is None:
            if filters is None and items is None:
                query = entities.Filters().prepare()
            elif filters is None:
                item_list = list()
                if isinstance(items, entities.PagedEntities):
                    for page in items:
                        for item in page:
                            item_list.append(item)
                elif isinstance(items, list):
                    item_list = items
                elif isinstance(items, entities.Item):
                    item_list.append(items)
                else:
                    raise exceptions.PlatformException("400", "Unknown items type")
                query = entities.Filters(
                    field="id",
                    values=[item.id for item in item_list],
                    operator=entities.FiltersOperations.IN,
                    use_defaults=False,
                ).prepare()
            else:
                query = filters.prepare()

        if dataset is None:
            dataset = self._dataset

        if task_owner is None:
            task_owner = self._client_api.info()["user_email"]

        if task_type not in ["annotation", "qa"]:
            raise ValueError('task_type must be one of: "annotation", "qa". got: {}'.format(task_type))

        if recipe_id is None:
            recipe_id = dataset.get_recipe_ids()[0]

        if project_id is None:
            if self._project_id is not None:
                project_id = self._project_id
            else:
                project_id = self.project.id

        if workload is None and assignee_ids is not None:
            workload = entities.Workload.generate(assignee_ids=assignee_ids)

        if assignments_ids is None:
            assignments_ids = list()

        payload = {
            "name": task_name,
            "query": "{}".format(json.dumps(query).replace("'", '"')),
            "taskOwner": task_owner,
            "spec": {"type": task_type},
            "datasetId": dataset.id,
            "projectId": project_id,
            "assignmentIds": assignments_ids,
            "recipeId": recipe_id,
            "dueDate": due_date * 1000,
            "asynced": wait,
            "priority": priority,
        }

        if check_if_exist:
            if check_if_exist.resource != entities.FiltersResource.TASK:
                raise exceptions.PlatformException(
                    "407",
                    "Filter resource for check_if_exist param must be {}, got {}".format(
                        entities.FiltersResource.TASK, check_if_exist.resource
                    ),
                )
            payload["checkIfExist"] = {"query": check_if_exist.prepare()}

        if workload:
            payload["workload"] = workload.to_json()

        if limit:
            payload["limit"] = limit

        if available_actions is not None:
            payload["availableActions"] = [action.to_json() for action in available_actions]

        if task_parent_id is not None:
            payload["spec"]["parentTaskId"] = task_parent_id

        if not enforce_video_conversion:
            payload["disableWebm"] = not enforce_video_conversion

        is_pulling = any([batch_size, max_batch_workload])
        is_consensus = any([consensus_percentage, consensus_assignees, consensus_task_type])
        if is_pulling and is_consensus:
            raise exceptions.PlatformException(error="400", message="Consensus can not work as a pulling task")
        if any([is_pulling, is_consensus]):
            if metadata is None:
                metadata = {}
            if "system" not in metadata:
                metadata["system"] = {}
            if allowed_assignees is not None or assignee_ids is not None:
                metadata["system"]["allowedAssignees"] = allowed_assignees if allowed_assignees else assignee_ids
            if consensus_task_type is not None:
                metadata["system"]["consensusTaskType"] = consensus_task_type
            metadata = self._add_task_metadata_params(metadata=metadata, input_value=batch_size, input_name="batchSize")
            metadata = self._add_task_metadata_params(
                metadata=metadata, input_value=max_batch_workload, input_name="maxBatchWorkload"
            )
            metadata = self._add_task_metadata_params(
                metadata=metadata, input_value=consensus_percentage, input_name="consensusPercentage"
            )
            metadata = self._add_task_metadata_params(
                metadata=metadata, input_value=consensus_assignees, input_name="consensusAssignees"
            )
            metadata = self._add_task_metadata_params(metadata=metadata, input_value=scoring, input_name="scoring")

        if metadata is not None:
            payload["metadata"] = metadata

        return self._create_task(payload, wait=wait)

    def __item_operations(self, dataset: entities.Dataset, op, task=None, task_id=None, filters=None, items=None):

        if task is None and task_id is None:
            raise exceptions.PlatformException("400", "Must provide either task or task id")
        elif task_id is None:
            task_id = task.id

        try:
            if filters is None and items is None:
                raise exceptions.PlatformException("400", "Must provide either filters or items list")

            if filters is None:
                filters = entities.Filters(
                    field="id",
                    values=[item.id for item in items],
                    operator=entities.FiltersOperations.IN,
                    use_defaults=False,
                )

            if op == "delete":
                if task is None:
                    task = self.get(task_id=task_id)
                assignment_ids = task.assignmentIds
                filters._ref_assignment = True
                filters._ref_assignment_id = assignment_ids

            filters._ref_task = True
            filters._ref_task_id = task_id
            filters._ref_op = op
            return dataset.items.update(filters=filters)
        finally:
            if filters is not None:
                filters._nullify_refs()

    @_api_reference.add(path="/annotationtasks/{id}/addToTask", method="post")
    def add_items(
        self,
        task: entities.Task = None,
        task_id=None,
        filters: entities.Filters = None,
        items=None,
        assignee_ids=None,
        query=None,
        workload=None,
        limit=None,
        wait=True,
    ) -> entities.Task:
        """
        Add items to a Task.

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        :param dtlpy.entities.task.Task task: task object
        :param str task_id: the id of the task
        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
        :param list items: list of items (item ids or objects) to add to the task
        :param list assignee_ids: list of assignees (contributors) who work on the task. Provide a list of users' emails
        :param dict query: query to filter the items for the task
        :param list workload: list of WorkloadUnit objects. Customize distribution (percentage) between the task assignees. For example: [dl.WorkloadUnit(annotator@hi.com, 80), dl.WorkloadUnit(annotator2@hi.com, 20)]
        :param int limit: the maximum number of items the task can include
        :param bool wait: wait until adding the items finishes
        :return: task entity
        :rtype: dtlpy.entities.task.Task

        **Example**:

        .. code-block:: python

            dataset.tasks.add_items(task=task,
                                    items=[item])
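        A minimal sketch (not from the SDK docs) of adding items by filter rather than by explicit
        item objects; the task id and folder path are placeholders:

        .. code-block:: python

            dataset.tasks.add_items(task_id='<task-id>',
                                    filters=dl.Filters(field='dir', values='/new-batch'))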
"""
|
|
1220
|
+
        if filters is None and items is None and query is None:
            raise exceptions.PlatformException("400", "Must provide either filters, query or items list")

        if task is None and task_id is None:
            raise exceptions.PlatformException("400", "Must provide either task or task_id")

        if query is None:
            if filters is None:
                if not isinstance(items, list):
                    items = [items]
                filters = entities.Filters(
                    field="id",
                    values=[item.id for item in items],
                    operator=entities.FiltersOperations.IN,
                    use_defaults=False,
                )
            query = filters.prepare()

        if workload is None and assignee_ids is not None:
            workload = entities.Workload.generate(assignee_ids=assignee_ids)

        if task_id is None:
            task_id = task.id

        payload = {"query": "{}".format(json.dumps(query).replace("'", '"'))}

        if workload is not None:
            payload["workload"] = workload.to_json()

        if limit is not None:
            payload["limit"] = limit

        payload["asynced"] = wait

        url = "{}/{}/addToTask".format(URL_PATH, task_id)

        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)

        if success:
            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
            if not wait:
                return command
            backoff_factor = 2
            if command.type == "BulkAddToTaskSetting":
                backoff_factor = 8
            command = command.wait(timeout=0, backoff_factor=backoff_factor)
            if task is None:
                task = self.get(task_id=task_id)
            if "addToTaskPayload" not in command.spec:
                raise exceptions.PlatformException(
                    error="400", message="addToTaskPayload key is missing in command response: {}".format(response)
                )
        else:
            raise exceptions.PlatformException(response)

        assert isinstance(task, entities.Task)
        return task

    # @_api_reference.add(path='/annotationtasks/{id}/removeFromTask', method='post')
    def remove_items(
        self,
        task: entities.Task = None,
        task_id=None,
        filters: entities.Filters = None,
        query=None,
        items=None,
        wait=True,
    ):
        """
        Remove items from a Task.

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        :param dtlpy.entities.task.Task task: task object
        :param str task_id: the id of the task
        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
        :param dict query: query to filter the items
        :param list items: list of items to remove from the task
        :param bool wait: wait until the item removal finishes
        :return: True if successful, raises an error otherwise
        :rtype: bool

        **Examples**:

        .. code-block:: python

            dataset.tasks.remove_items(task=task,
                                       items=[item])
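        A minimal sketch (not from the SDK docs) of removing a filtered subset from a task instead
        of explicit item objects; the task id, filter field, and value are placeholders:

        .. code-block:: python

            dataset.tasks.remove_items(task_id='<task-id>',
                                       filters=dl.Filters(field='dir', values='/obsolete'))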

        """
        if filters is None and items is None and query is None:
            raise exceptions.PlatformException("400", "Must provide either filters, query or items list")

        if task is None and task_id is None:
            raise exceptions.PlatformException("400", "Must provide either task or task_id")

        if query is None:
            if filters is None:
                if not isinstance(items, list):
                    items = [items]
                filters = entities.Filters(
                    field="id",
                    values=[item.id for item in items],
                    operator=entities.FiltersOperations.IN,
                    use_defaults=False,
                )
            query = filters.prepare()

        if task_id is None:
            task_id = task.id

        payload = {"query": "{}".format(json.dumps(query).replace("'", '"')), "asynced": wait}

        url = "{}/{}/removeFromTask".format(URL_PATH, task_id)

        success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)

        if success:
            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
            if not wait:
                return command
            command = command.wait(timeout=0)

            if "removeFromTaskId" not in command.spec:
                raise exceptions.PlatformException(
                    error="400", message="removeFromTaskId key is missing in command response: {}".format(response)
                )
        else:
            raise exceptions.PlatformException(response)
        return True

    def get_items(
        self,
        task_id: str = None,
        task_name: str = None,
        dataset: entities.Dataset = None,
        filters: entities.Filters = None,
        get_consensus_items: bool = False,
        task: entities.Task = None,
    ) -> entities.PagedEntities:
        """
        Get the task items to use in your code.

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        If a filters param is provided, you will receive a PagedEntities output of the task items. If no filter is provided, you will receive a list of the items.

        :param str task_id: the id of the task
        :param str task_name: the name of the task
        :param bool get_consensus_items: get the items from the consensus assignment
        :param dtlpy.entities.Task task: task object
        :param dtlpy.entities.dataset.Dataset dataset: dataset object, the dataset that the task refers to
        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
        :return: list of the items or a PagedEntities output of items
        :rtype: list or dtlpy.entities.paged_entities.PagedEntities

        **Example**:

        .. code-block:: python

            dataset.tasks.get_items(task_id='task_id')
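        A minimal sketch (not from the SDK docs) of iterating the returned pages; it assumes the
        call returns a PagedEntities object, as it does when a dataset context is available, and
        the task id is a placeholder:

        .. code-block:: python

            pages = dataset.tasks.get_items(task_id='<task-id>')
            for page in pages:
                for item in page:
                    print(item.name)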
"""
|
|
1382
|
+
        if task is None and task_id is None and task_name is None:
            raise exceptions.PlatformException("400", "Please provide either task_id or task_name")

        if task_id is None:
            if task is None:
                task = self.get(task_name=task_name)
            task_id = task.id

        if dataset is None and self._dataset is None:
            raise exceptions.PlatformException("400", "Please provide a dataset entity")
        if dataset is None:
            dataset = self._dataset

        if filters is None:
            filters = entities.Filters(use_defaults=False)
        filters.add(field="metadata.system.refs.id", values=[task_id], operator=entities.FiltersOperations.IN)

        if not get_consensus_items:
            if task is None:
                task = self.get(task_id=task_id)
            if task.metadata.get("system", dict()).get("consensusAssignmentId", None):
                filters.add(
                    field="metadata.system.refs.id",
                    values=task.metadata["system"]["consensusAssignmentId"],
                    operator=entities.FiltersOperations.NOT_EQUAL,
                )

        return dataset.items.list(filters=filters)

    def set_status(self, status: str, operation: str, task_id: str, item_ids: List[str]):
        """
        Update an item status within a task.

        **Prerequisites**: You must be in the role of an *owner*, *developer*, or *annotation manager* who has been assigned to be *owner* of the annotation task.

        :param str status: string that describes the status
        :param str operation: the status action, either 'create' or 'delete'
        :param str task_id: the id of the task
        :param List[str] item_ids: list of item ids
        :return: True if successful
        :rtype: bool

        **Example**:

        .. code-block:: python

            dataset.tasks.set_status(task_id='task_id', item_ids=['item_id'], status='complete', operation='create')
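        A minimal sketch (not from the SDK docs) of applying a status to every item returned by a
        task query; the task id is a placeholder and ``pages`` is assumed to be the PagedEntities
        result of ``get_items``:

        .. code-block:: python

            pages = dataset.tasks.get_items(task_id='<task-id>')
            item_ids = [item.id for page in pages for item in page]
            dataset.tasks.set_status(task_id='<task-id>',
                                     item_ids=item_ids,
                                     status='completed',
                                     operation='create')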
"""
|
|
1430
|
+
url = "/assignments/items/tasks/{task_id}/status".format(task_id=task_id)
|
|
1431
|
+
payload = {
|
|
1432
|
+
"itemIds": item_ids,
|
|
1433
|
+
"statusPayload": {"operation": operation, "returnLastStatus": True, "status": status},
|
|
1434
|
+
}
|
|
1435
|
+
|
|
1436
|
+
success, response = self._client_api.gen_request(req_type="post", path=url, json_req=payload)
|
|
1437
|
+
|
|
1438
|
+
if not success:
|
|
1439
|
+
raise exceptions.PlatformException(response)
|
|
1440
|
+
if response.json() is not None:
|
|
1441
|
+
updated_items = set(response.json().keys())
|
|
1442
|
+
log_msg = "Items status was updated successfully."
|
|
1443
|
+
if len(updated_items) != len(item_ids):
|
|
1444
|
+
failed_items = set(item_ids).difference(updated_items)
|
|
1445
|
+
log_msg = "{success_count} out of TOTAL items were updated. The following items failed to update: {failed_items}".format(
|
|
1446
|
+
success_count=len(updated_items), failed_items=failed_items
|
|
1447
|
+
)
|
|
1448
|
+
logger.info(msg=log_msg)
|
|
1449
|
+
return True
|
|
1450
|
+
|
|
1451
|
+
    def task_scores(self, task_id: str = None, page_offset: int = 0, page_size: int = 100):
        """
        Get all entity scores in a task.

        :param str task_id: the id of the task
        :param int page_offset: the page offset
        :param int page_size: the page size
        :return: a page of the task scores

        **Example**:

        .. code-block:: python

            dataset.tasks.task_scores(task_id='task_id')
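        A minimal sketch (not from the SDK docs) of requesting a specific page of scores with an
        explicit page size; the task id, offset, and size values are placeholders:

        .. code-block:: python

            scores_page = dataset.tasks.task_scores(task_id='<task-id>', page_offset=1, page_size=50)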
"""
|
|
1466
|
+
if task_id is None:
|
|
1467
|
+
raise exceptions.PlatformException("400", "Please provide task_id")
|
|
1468
|
+
|
|
1469
|
+
url = "/scores/tasks/{task_id}?page={page_offset}&pageSize={page_size}".format(
|
|
1470
|
+
task_id=task_id, page_offset=page_offset, page_size=page_size
|
|
1471
|
+
)
|
|
1472
|
+
success, response = self._client_api.gen_request(req_type="get", path=url)
|
|
1473
|
+
|
|
1474
|
+
if success:
|
|
1475
|
+
return response.json()
|
|
1476
|
+
else:
|
|
1477
|
+
raise exceptions.PlatformException(response)
|