dtlpy 1.90.39__py3-none-any.whl → 1.92.18__py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- dtlpy/__init__.py +5 -2
- dtlpy/__version__.py +1 -1
- dtlpy/assets/lock_open.png +0 -0
- dtlpy/entities/__init__.py +1 -1
- dtlpy/entities/analytic.py +118 -98
- dtlpy/entities/annotation.py +22 -31
- dtlpy/entities/annotation_collection.py +19 -21
- dtlpy/entities/app.py +13 -3
- dtlpy/entities/assignment.py +6 -0
- dtlpy/entities/base_entity.py +0 -23
- dtlpy/entities/command.py +3 -2
- dtlpy/entities/dataset.py +53 -3
- dtlpy/entities/dpk.py +15 -0
- dtlpy/entities/execution.py +13 -1
- dtlpy/entities/feature_set.py +3 -0
- dtlpy/entities/filters.py +87 -8
- dtlpy/entities/integration.py +1 -1
- dtlpy/entities/item.py +41 -1
- dtlpy/entities/node.py +49 -3
- dtlpy/entities/ontology.py +62 -5
- dtlpy/entities/package_function.py +2 -0
- dtlpy/entities/package_module.py +13 -0
- dtlpy/entities/pipeline.py +20 -1
- dtlpy/entities/pipeline_execution.py +37 -6
- dtlpy/entities/prompt_item.py +240 -27
- dtlpy/entities/recipe.py +37 -0
- dtlpy/entities/service.py +33 -4
- dtlpy/ml/base_model_adapter.py +166 -18
- dtlpy/new_instance.py +80 -9
- dtlpy/repositories/apps.py +68 -22
- dtlpy/repositories/assignments.py +1 -1
- dtlpy/repositories/commands.py +10 -2
- dtlpy/repositories/datasets.py +143 -13
- dtlpy/repositories/dpks.py +34 -1
- dtlpy/repositories/executions.py +27 -30
- dtlpy/repositories/feature_sets.py +23 -3
- dtlpy/repositories/features.py +4 -1
- dtlpy/repositories/models.py +1 -1
- dtlpy/repositories/packages.py +6 -3
- dtlpy/repositories/pipeline_executions.py +58 -5
- dtlpy/repositories/services.py +28 -7
- dtlpy/repositories/tasks.py +8 -2
- dtlpy/repositories/uploader.py +5 -2
- dtlpy/services/api_client.py +74 -12
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/METADATA +2 -2
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/RECORD +54 -57
- tests/features/environment.py +67 -1
- dtlpy/callbacks/__init__.py +0 -16
- dtlpy/callbacks/piper_progress_reporter.py +0 -29
- dtlpy/callbacks/progress_viewer.py +0 -54
- {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp +0 -0
- {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.py +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/LICENSE +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/WHEEL +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/top_level.txt +0 -0
dtlpy/entities/base_entity.py
CHANGED
@@ -98,22 +98,6 @@ class DlEntity(object):
         """
         return miscellaneous.List([self]).to_df(show_all=show_all, columns=columns)
 
-    # def __repr__(self):
-    #     string = '{}('.format(self.__class__.__name__)
-    #     for prop in dir(self):
-    #         if isinstance(prop, DlProperty) and prop.repr is True:
-    #             string += '{}={}'.format()
-    #     params = json.dumps(self._dict, indent=4)
-    #     return "{}({})".format(self.__class__.__name__, params)
-
-    # def __repr__(self):
-    #     self.print()
-
-    # def __getattribute__(self, attr):
-    #     if super(BaseEntity, self).__getattribute__(attr) is None:
-    #         pass
-    #     return super(BaseEntity, self).__getattribute__(attr)
-
 
 class DlProperty:
     def __init__(self, location=None, default='NODEFAULT', _type=None, _kls=None):

@@ -206,16 +190,9 @@
 
             # instantiate dictionary to the type
             value = self._to_instance(_dict=value)
-
-        # isinstance(value, typing_extensions.get_args(self._type))
         return value
 
     def __set__(self, instance, value):
-        # TODO still not working properly. need to fix this validation
-        # if not isinstance(value, typing_extensions.get_args(self._type)):
-        #     logger.warning(
-        #         f'Incorrect typing for type: {type(instance)}. {self.location} must be of type {self._type}. Received: {type(value)}')
-        _client_api = getattr(instance, 'client_api', None)
 
         # validate - if validator is set
         if self._validator is not None:
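For context, `DlProperty` (kept above) is a descriptor that maps entity attributes onto a backing `_dict`, as the removed comments hint via `self._dict` and the `location` argument. A minimal sketch of that pattern, with illustrative names only, not dtlpy's actual implementation:

    # Minimal dict-backed descriptor in the spirit of DlProperty (illustrative only).
    class DictProperty:
        def __init__(self, location):
            self.location = location  # key path into the entity's backing dict

        def __get__(self, instance, owner):
            if instance is None:
                return self
            return instance._dict.get(self.location[-1])

        def __set__(self, instance, value):
            # DlProperty.__set__ additionally runs an optional validator here
            instance._dict[self.location[-1]] = value


    class ToyEntity:
        name = DictProperty(location=['name'])

        def __init__(self):
            self._dict = {}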
dtlpy/entities/command.py
CHANGED
@@ -141,7 +141,7 @@ class Command(entities.BaseEntity):
                    entities.CommandsStatus.FINALIZING,
                    entities.CommandsStatus.IN_PROGRESS]
 
-    def wait(self, timeout=0, step=None,
+    def wait(self, timeout=0, step=None, backoff_factor=1):
         """
         Wait for Command to finish
 

@@ -157,4 +157,5 @@ class Command(entities.BaseEntity):
                                   timeout=timeout,
                                   step=step,
                                   url=self.url,
-                                  backoff_factor=backoff_factor
+                                  backoff_factor=backoff_factor
+                                  )
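A usage sketch for the new `backoff_factor` argument (the `command` entity is assumed to come from an earlier long-running API call; per the new default, `backoff_factor=1` preserves the previous fixed-step polling):

    # Wait up to 5 minutes, stretching the interval between status checks.
    # Assumes wait() returns the updated command, as the repository helper does.
    command = command.wait(timeout=300, backoff_factor=2)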
dtlpy/entities/dataset.py
CHANGED
@@ -18,6 +18,11 @@ class IndexDriver(str, Enum):
     V2 = "v2"
 
 
+class ExportType(str, Enum):
+    JSON = "json"
+    ZIP = "zip"
+
+
 class ExpirationOptions:
     """
     ExpirationOptions object

@@ -58,7 +63,6 @@ class Dataset(entities.BaseEntity):
     items_count = attr.ib()
     metadata = attr.ib(repr=False)
     directoryTree = attr.ib(repr=False)
-    export = attr.ib(repr=False)
     expiration_options = attr.ib()
     index_driver = attr.ib()
     enable_sync_with_cloned = attr.ib(repr=False)

@@ -165,7 +169,6 @@
             projects=projects,
             creator=_json.get('creator', None),
             items_url=_json.get('items', None),
-            export=_json.get('export', None),
             driver=_json.get('driver', None),
             name=_json.get('name', None),
             url=_json.get('url', None),

@@ -240,7 +243,7 @@
 
     @property
     def platform_url(self):
-        return self._client_api._get_resource_url("projects/{}/datasets/{}".format(self.project.id, self.id))
+        return self._client_api._get_resource_url("projects/{}/datasets/{}/items".format(self.project.id, self.id))
 
     @readonly.setter
     def readonly(self, state):

@@ -653,6 +656,53 @@
             export_version=export_version
         )
 
+    def export(self,
+               local_path=None,
+               filters=None,
+               annotation_filters=None,
+               feature_vector_filters=None,
+               include_feature_vectors: bool = False,
+               include_annotations: bool = False,
+               export_type: ExportType = ExportType.JSON,
+               timeout: int = 0):
+        """
+        Export dataset items and annotations.
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        You must provide at least ONE of the following params: dataset, dataset_name, dataset_id.
+
+        :param str local_path: The local path to save the exported dataset
+        :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
+        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
+        :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
+        :param bool include_feature_vectors: Include item feature vectors in the export
+        :param bool include_annotations: Include item annotations in the export
+        :param entities.ExportType export_type: Type of export ('json' or 'zip')
+        :param int timeout: Maximum time in seconds to wait for the export to complete
+        :return: Exported item
+        :rtype: dtlpy.entities.item.Item
+
+        **Example**:
+
+        .. code-block:: python
+
+            export_item = dataset.export(filters=filters,
+                                         include_feature_vectors=True,
+                                         include_annotations=True,
+                                         export_type=dl.ExportType.JSON)
+        """
+
+        return self.datasets.export(dataset=self,
+                                    local_path=local_path,
+                                    filters=filters,
+                                    annotation_filters=annotation_filters,
+                                    feature_vector_filters=feature_vector_filters,
+                                    include_feature_vectors=include_feature_vectors,
+                                    include_annotations=include_annotations,
+                                    export_type=export_type,
+                                    timeout=timeout)
+
     def upload_annotations(self,
                            local_path,
                            filters=None,
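An end-to-end sketch of the new `export` entry point (the dataset id and folder are placeholders; `dl.ExportType` mirrors the enum added above):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')  # placeholder id
    filters = dl.Filters(field='dir', values='/train')
    export_item = dataset.export(filters=filters,
                                 include_annotations=True,
                                 include_feature_vectors=True,
                                 export_type=dl.ExportType.ZIP,  # JSON is the default
                                 timeout=600)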
dtlpy/entities/dpk.py
CHANGED
@@ -126,6 +126,7 @@ class DpkComputeConfig(entities.DlEntity):
     driver_id: str = entities.DlProperty(location=['driverId'], _type=str)
     versions: dict = entities.DlProperty(location=['versions'], _type=dict)
     name: str = entities.DlProperty(location=['name'], _type=str)
+    integrations: List[dict] = entities.DlProperty(location=['integrations'], _type=list)
 
     def to_json(self) -> dict:
         return self._dict.copy()

@@ -226,6 +227,7 @@ class Components(entities.DlEntity):
 class Dpk(entities.DlEntity):
     # name change
     id: str = entities.DlProperty(location=['id'], _type=str)
+    base_id: str = entities.DlProperty(location=['baseId'], _type=str)
     name: str = entities.DlProperty(location=['name'], _type=str)
     version: str = entities.DlProperty(location=['version'], _type=str)
     attributes: list = entities.DlProperty(location=['attributes'], _type=dict)

@@ -388,6 +390,19 @@
             self._revisions = self._get_revision_pages()
         return self._revisions
 
+    def get_revisions(self, version: str):
+        """
+        Get the dpk with the specified version.
+
+        :param str version: the version of the dpk to get.
+        :return: Dpk
+
+        ** Example **
+        .. code-block:: python
+            dpk = dpk.get_revisions(version='1.0.0')
+        """
+        return self.dpks.get_revisions(dpk_id=self.base_id, version=version)
+
     @staticmethod
     def _protected_from_json(_json, client_api, project, is_fetched=True):
         """
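A sketch of the new revision lookup (a fetched `dpk` entity is assumed; `get_revisions` resolves the requested version through the new `base_id` property):

    # Fetch a specific published revision of the DPK.
    old_dpk = dpk.get_revisions(version='1.0.0')
    print(old_dpk.version, old_dpk.base_id)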
dtlpy/entities/execution.py
CHANGED
@@ -13,8 +13,12 @@ logger = logging.getLogger(name='dtlpy')
 class ExecutionStatus(str, Enum):
     SUCCESS = "success"
     FAILED = "failed"
-    IN_PROGRESS = "
+    IN_PROGRESS = "in-progress"
     CREATED = "created"
+    TERMINATED = 'terminated',
+    ABORTED = 'aborted'
+    CANCELED = 'canceled'
+    SYSTEM_FAILURE = 'system-failure'
 
 
 @attr.s

@@ -377,3 +381,11 @@
     :return: Service execution object
     """
     return self.executions.wait(execution_id=self.id)
+
+    def in_progress(self):
+        return self.latest_status['status'] not in [ExecutionStatus.FAILED,
+                                                    ExecutionStatus.SUCCESS,
+                                                    ExecutionStatus.TERMINATED,
+                                                    ExecutionStatus.ABORTED,
+                                                    ExecutionStatus.CANCELED,
+                                                    ExecutionStatus.SYSTEM_FAILURE]
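The new terminal statuses make a simple polling loop possible via `in_progress()`. A sketch, assuming a fetched `execution` entity and the top-level `executions` repository:

    import time

    # in_progress() stays True until latest_status reaches any terminal state
    while execution.in_progress():
        time.sleep(5)
        execution = dl.executions.get(execution_id=execution.id)
    print(execution.latest_status['status'])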
dtlpy/entities/feature_set.py
CHANGED
@@ -31,6 +31,7 @@ class FeatureSet(entities.BaseEntity):
     set_type = attr.ib()
     entity_type = attr.ib()
     project_id = attr.ib()
+    model_id = attr.ib()
     org_id = attr.ib()
 
     # sdk

@@ -100,6 +101,7 @@
             size=_json.get('size', None),
             url=_json.get('url', None),
             project_id=_json.get('project', None),
+            model_id=_json.get('modelId', None),
             created_at=_json.get('createdAt', None),
             creator=_json.get('creator', None),
             updated_by=_json.get('updatedBy', None),

@@ -121,6 +123,7 @@
             'type': self.set_type,
             'entityType': self.entity_type,
             'project': self.project_id,
+            'modelId': self.model_id,
             'creator': self.creator,
             'createdAt': self.created_at,
             'updatedBy': self.updated_by,
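The new `model_id` field round-trips through `from_json`/`to_json`, so it can be read directly off a fetched entity (the feature-set name is a placeholder):

    feature_set = project.feature_sets.get(feature_set_name='<feature-set-name>')
    print(feature_set.model_id)  # id of the model linked to this feature set, or None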
dtlpy/entities/filters.py
CHANGED
@@ -2,6 +2,7 @@ import urllib.parse
 import logging
 import json
 import os
+import io
 from enum import Enum
 
 from .. import exceptions, entities

@@ -139,7 +140,7 @@
 
     @property
     def resource(self):
-        return self._resource
+        return f'{self._resource.value}' if isinstance(self._resource, FiltersResource) else f'{self._resource}'
 
     @resource.setter
     def resource(self, resource):

@@ -295,9 +296,9 @@
             self.join = dict()
         if 'on' not in self.join:
             if self.resource == FiltersResource.ITEM:
-                self.join['on'] = {'resource': FiltersResource.ANNOTATION, 'local': 'itemId', 'forigen': 'id'}
+                self.join['on'] = {'resource': FiltersResource.ANNOTATION.value, 'local': 'itemId', 'forigen': 'id'}
             else:
-                self.join['on'] = {'resource': FiltersResource.ITEM, 'local': 'id', 'forigen': 'itemId'}
+                self.join['on'] = {'resource': FiltersResource.ITEM.value, 'local': 'id', 'forigen': 'itemId'}
         if 'filter' not in self.join:
             self.join['filter'] = dict()
         join_method = '$' + method

@@ -474,7 +475,7 @@
         """
         if value not in [FiltersOrderByDirection.ASCENDING, FiltersOrderByDirection.DESCENDING]:
             raise exceptions.PlatformException(error='400', message='Sort can be by ascending or descending order only')
-        self.sort[field] = value
+        self.sort[field] = value.value if isinstance(value, FiltersOrderByDirection) else value
 
     def platform_url(self, resource) -> str:
         """

@@ -488,14 +489,17 @@
         # add the view option
         _json['view'] = 'icons'
         # convert from enum to string
-        _json["resource"] = '{
+        _json["resource"] = f'{_json["resource"]}'
         # convert the dictionary to a json string
-        _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter')
+        _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter'),
+                                         'join': _json.pop('join'),
+                                         'sort': _json.get('sort')})
         # set the page size as the UI default
         _json['pageSize'] = 100
+        _json['page'] = _json['page']
         # build the url for the dataset data browser
         if isinstance(resource, entities.Dataset):
-            url = resource.platform_url + '
+            url = resource.platform_url + f'?{urllib.parse.urlencode(_json)}'
         else:
             raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
         return url

@@ -511,6 +515,81 @@
         else:
             raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
 
+    def save(self, project: entities.Project, filter_name: str):
+        """
+        Save the current DQL filter to the project
+
+        :param project: dl.Project
+        :param filter_name: the saved filter's name
+        :return: True if success
+        """
+        _json_filter = self.prepare()
+        shebang_dict = {"type": "dql",
+                        "shebang": "dataloop",
+                        "metadata": {
+                            "version": "1.0.0",
+                            "system": {
+                                "mimetype": "dql"
+                            },
+                            "dltype": "filter",
+                            "filterFieldsState": [],
+                            "resource": "items",
+                            "filter": _json_filter.pop('filter'),
+                            "join": _json_filter.pop('join')
+                        }
+                        }
+        b_dataset = project.datasets._get_binaries_dataset()
+        byte_io = io.BytesIO()
+        byte_io.name = filter_name
+        byte_io.write(json.dumps(shebang_dict).encode())
+        byte_io.seek(0)
+        b_dataset.items.upload(local_path=byte_io,
+                               remote_path='/.dataloop/dqlfilters/items',
+                               remote_name=filter_name)
+        return True
+
+    @classmethod
+    def load(cls, project: entities.Project, filter_name: str) -> 'Filters':
+        """
+        Load a saved filter from the project by name
+
+        :param project: dl.Project entity
+        :param filter_name: filter name
+        :return: dl.Filters
+        """
+        b_dataset = project.datasets._get_binaries_dataset()
+        f = entities.Filters(custom_filter={
+            'filter': {'$and': [{'filename': f'/.dataloop/dqlfilters/items/{filter_name}'}]},
+            'page': 0,
+            'pageSize': 1000,
+            'resource': 'items'
+        })
+        pages = b_dataset.items.list(filters=f)
+        if pages.items_count == 0:
+            raise exceptions.NotFound(
+                f'Saved filter not found: {filter_name}. Run `Filters.list()` to list existing filters')
+        with open(pages.items[0].download()) as f:
+            data = json.load(f)
+        custom_filter = data['metadata']['filter']
+        custom_filter['join'] = data['metadata']['join']
+        return cls(custom_filter=custom_filter)
+
+    @staticmethod
+    def list(project: entities.Project) -> list:
+        """
+        List all saved filters for a project
+        :param project: dl.Project entity
+        :return: a list of all the saved filters' names
+        """
+        b_dataset = project.datasets._get_binaries_dataset()
+        f = entities.Filters(use_defaults=False,
+                             field='dir',
+                             values='/.dataloop/dqlfilters/items')
+        pages = b_dataset.items.list(filters=f)
+        all_filter_items = list(pages.all())
+        names = [i.name for i in all_filter_items]
+        return names
+
 
 class SingleFilter:
     def __init__(self, field, values, operator: FiltersOperations = None):

@@ -552,6 +631,6 @@
             _json[self.field] = value
 
         return _json
-
+
     def print(self, indent=2):
         print(json.dumps(self.prepare(), indent=indent))
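A sketch of the new saved-filter round trip (the project name is a placeholder; per `save` above, filters are persisted as DQL items under the project's hidden binaries dataset):

    import dtlpy as dl

    project = dl.projects.get(project_name='<project-name>')
    filters = dl.Filters(field='metadata.system.mimetype', values='image/jpeg')

    filters.save(project=project, filter_name='jpeg-items')  # persist the DQL
    print(dl.Filters.list(project=project))                  # ['jpeg-items', ...]
    loaded = dl.Filters.load(project=project, filter_name='jpeg-items')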
dtlpy/entities/integration.py
CHANGED
@@ -79,7 +79,7 @@ class Integration(entities.BaseEntity):
             name=_json.get('name', None),
             creator=_json.get('creator', None),
             created_at=_json.get('createdAt', None),
-            update_at=_json.get('
+            update_at=_json.get('updatedAt', None),
             type=_json.get('type', None),
             org=_json.get('org', None),
             client_api=client_api,
dtlpy/entities/item.py
CHANGED
@@ -6,11 +6,13 @@ import logging
 import attr
 import copy
 import os
-
+import io
 from .. import repositories, entities, exceptions
 from .annotation import ViewAnnotationOptions, ExportVersion
 from ..services.api_client import ApiClient
 from ..services.api_client import client as client_api
+import json
+import requests
 
 logger = logging.getLogger(name='dtlpy')
 

@@ -182,6 +184,18 @@
     def model(self):
         return self._model
 
+    def __update_item_binary(self, _json):
+        binary = io.BytesIO()
+        binary.write(json.dumps(_json).encode())
+        binary.seek(0)
+        binary.name = self.name
+        resp = requests.post(url=client_api.environment + f'/items/{self.id}/revisions',
+                             headers=client_api.auth,
+                             files={'file': (binary.name, binary)}
+                             )
+        if not resp.ok:
+            raise ValueError(resp.text)
+
     @property
     def project(self):
         if self._project is None:

@@ -661,6 +675,32 @@
         else:
             raise exceptions.PlatformException('400', 'must provide assignment_id or task_id')
 
+    def status(self, assignment_id: str = None, task_id: str = None):
+        """
+        Get item status
+
+        :param str assignment_id: assignment id
+        :param str task_id: task id
+
+        :return: status
+        :rtype: str
+
+        **Example**:
+
+        .. code-block:: python
+
+            status = item.status(task_id='task_id')
+        """
+        if not assignment_id and not task_id:
+            raise exceptions.PlatformException('400', 'must provide assignment_id or task_id')
+        status = None
+        resource_id = assignment_id if assignment_id else task_id
+        for ref in self.metadata.get('system', dict()).get('refs', []):
+            if ref.get('id') == resource_id:
+                status = ref.get('metadata', {}).get('status', None)
+                break
+        return status
+
     def set_description(self, text: str):
         """
         Update Item description
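A sketch for the new `status` helper (assumes the item belongs to the given task or assignment; per the loop above, it returns None when no matching status ref is found):

    status = item.status(task_id=task.id)  # or assignment_id=...
    if status is None:
        print('no status set for this item in the given task')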
dtlpy/entities/node.py
CHANGED
@@ -120,7 +120,7 @@ class PipelineNodeIO:
         self.description = description
 
         if action is not None:
-            warnings.warn('action param has been deprecated in version 1.
+            warnings.warn('action param has been deprecated in version 1.95', DeprecationWarning)
             if actions is None:
                 actions = []
             actions.append(action)

@@ -128,7 +128,7 @@
 
     @property
     def action(self):
-        warnings.warn('action attribute has been deprecated in version 1.
+        warnings.warn('action attribute has been deprecated in version 1.95', DeprecationWarning)
         return None
 
     @staticmethod

@@ -221,6 +221,8 @@ class PipelineNode:
                  config: dict = None,
                  position: tuple = (1, 1),
                  app_id: str = None,
+                 dpk_name: str = None,
+                 app_name: str = None,
                  ):
         """
         :param str name: node name

@@ -234,6 +236,8 @@
         :param dict config: for the code node dict in format { package: {code : the_code}}
         :param tuple position: tuple of the node place
         :param str app_id: app id
+        :param str dpk_name: dpk name
+        :param str app_name: app name
         """
         self.name = name
         self.node_id = node_id

@@ -246,6 +250,8 @@
         self.config = config
         self.position = position
         self.app_id = app_id
+        self.dpk_name = dpk_name
+        self.app_name = app_name
         self._pipeline = None
 
     @property

@@ -297,7 +303,9 @@
             project_id=_json.get('projectId', None),
             config=_json.get('config', None),
             position=position,
-            app_id=_json.get('appId', None)
+            app_id=_json.get('appId', None),
+            dpk_name=_json.get('dpkName', None),
+            app_name=_json.get('appName', None),
         )
 
     def to_json(self):

@@ -310,6 +318,8 @@
             'type': self.node_type,
             'namespace': self.namespace.to_json(),
             'projectId': self.project_id,
+            'dpkName': self.dpk_name,
+            'appName': self.app_name,
         }
         if self.config is not None:
             _json['config'] = self.config

@@ -921,12 +931,20 @@ class DatasetNode(PipelineNode):
                  project_id: str,
                  dataset_id: str,
                  dataset_folder: str = None,
+                 load_existing_data: bool = False,
+                 data_filters: entities.Filters = None,
                  position: tuple = (1, 1)):
         """
         :param str name: node name
         :param str project_id: project id
         :param str dataset_id: dataset id
         :param str dataset_folder: folder in dataset to work in it
+        :param bool load_existing_data: optional - enable to automatically load existing data into the
+                                        pipeline (executions) upon activation, based on the defined dataset,
+                                        folder, and data_filters.
+        :param entities.Filters data_filters: optional - filters entity or a dictionary containing filters parameters.
+                                              Use to filter the data items to be loaded when load_existing_data
+                                              is enabled.
         :param tuple position: tuple of the node place
         """
         inputs = [self._default_io()]

@@ -944,6 +962,8 @@
                          position=position)
         self.dataset_id = dataset_id
         self.dataset_folder = dataset_folder
+        self.load_existing_data = load_existing_data
+        self.data_filters = data_filters
 
     @property
     def dataset_id(self):

@@ -964,6 +984,32 @@
             dataset_folder = '/' + dataset_folder
         self.metadata['dir'] = dataset_folder
 
+    @property
+    def load_existing_data(self):
+        return self.metadata.get('triggerToPipeline', {}).get('active', False)
+
+    @load_existing_data.setter
+    def load_existing_data(self, load_existing_data: bool):
+        if load_existing_data:
+            self.metadata.setdefault('triggerToPipeline', {})['active'] = True
+        else:
+            self.metadata.pop('triggerToPipeline', None)
+
+    @property
+    def data_filters(self):
+        data_filters = self.metadata.get('triggerToPipeline', {}).get('filter', None)
+        if data_filters:
+            data_filters = entities.Filters(custom_filter=json.loads(data_filters))
+        return data_filters
+
+    @data_filters.setter
+    def data_filters(self, data_filters: entities.Filters):
+        if data_filters is None:
+            filters = None
+        else:
+            filters = json.dumps(data_filters.prepare(query_only=True).get('filter'))
+        self.metadata.setdefault('triggerToPipeline', {})['filter'] = filters
+
     @staticmethod
     def from_json(_json: dict):
         parent = PipelineNode.from_json(_json)
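A sketch of a `DatasetNode` using the new back-fill options (existing project and dataset entities are assumed; per the setters above, the values land under the node's `triggerToPipeline` metadata):

    import dtlpy as dl

    data_filters = dl.Filters(field='dir', values='/incoming')
    node = dl.DatasetNode(name='input-data',
                          project_id=project.id,
                          dataset_id=dataset.id,
                          load_existing_data=True,    # back-fill matching items on activation
                          data_filters=data_filters,  # restrict which items are loaded
                          position=(1, 1))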
dtlpy/entities/ontology.py
CHANGED
@@ -207,7 +207,14 @@ class Ontology(entities.BaseEntity):
 
     @property
     def _use_attributes_2(self):
-
+        if isinstance(self.metadata, dict):
+            attributes = self.metadata.get("attributes", None)
+            if attributes is not None:
+                return True
+            else:
+                if isinstance(self.attributes, list) and len(self.attributes) > 0:
+                    return False
+        return True
 
     @classmethod
     def from_json(cls, _json, client_api, recipe, dataset=None, project=None, is_fetched=True):

@@ -223,10 +230,9 @@
         :return: Ontology object
         :rtype: dtlpy.entities.ontology.Ontology
         """
-
-
-        else
-            attributes = _json.get('metadata', {}).get("attributes", [])
+        attributes_v2 = _json.get('metadata', {}).get("attributes", [])
+        attributes_v1 = _json.get("attributes", [])
+        attributes = attributes_v2 if attributes_v2 else attributes_v1
 
         labels = list()
         for root in _json["roots"]:

@@ -741,3 +747,54 @@
         """
 
         return self.ontologies.delete_attributes(ontology_id=self.id, keys=keys)
+
+    def copy_from(self, ontology_json: dict):
+        """
+        Import ontology to the platform.\n
+        Notice: only the following fields will be updated: `labels`, `attributes`, `instance_map` and `color_map`.
+
+        :param dict ontology_json: The source ontology json to copy from
+        :return: Ontology object: The updated ontology entity
+        :rtype: dtlpy.entities.ontology.Ontology
+
+        **Example**:
+
+        .. code-block:: python
+
+            ontology = ontology.import_ontology(ontology_json=ontology_json)
+        """
+        # TODO: Add support for import from ontology entity in the Future
+        if not self._use_attributes_2:
+            raise ValueError("This method is only supported for attributes 2 mode!")
+        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api, recipe=self.recipe)
+
+        # Update 'labels' and 'attributes'
+        self.labels = new_ontology.labels
+        new_attributes = new_ontology.attributes
+        if isinstance(new_attributes, list):
+            for new_attribute in new_attributes:
+                attribute_range = new_attribute.get("range", None)
+                if attribute_range is not None:
+                    attribute_range = entities.AttributesRange(
+                        min_range=attribute_range.get("min", None),
+                        max_range=attribute_range.get("max", None),
+                        step=attribute_range.get("step", None)
+                    )
+                script_data = new_attribute.get("scriptData", None)
+                if script_data is None:
+                    new_attribute_key = new_attribute.get("key", None)
+                    raise Exception(f"Attribute '{new_attribute_key}' scriptData is missing in the ontology json!")
+                self.update_attributes(
+                    title=script_data.get("title", None),
+                    key=new_attribute.get("key", None),
+                    attribute_type=new_attribute.get("type", None),
+                    scope=new_attribute.get("scope", None),
+                    optional=script_data.get("optional", None),
+                    values=new_attribute.get("values", None),
+                    attribute_range=attribute_range
+                )
+
+        # Update 'instance map' and 'color map'
+        self._instance_map = new_ontology.instance_map
+        self._color_map = new_ontology.color_map
+        return self.update(system_metadata=True)
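A sketch of `copy_from` (both ontology entities are assumed to exist; per the guard above, it raises unless the target ontology uses attributes 2.0):

    # Copy labels, attributes, instance map and color map from another ontology's JSON.
    source_json = source_ontology.to_json()
    ontology = ontology.copy_from(ontology_json=source_json)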
dtlpy/entities/package_function.py
CHANGED

@@ -97,6 +97,8 @@ class FunctionIO(entities.DlEntity):
     name = entities.DlProperty(location=['name'], _type=str)
     actions = entities.DlProperty(location=['actions'], _type=list)
     description = entities.DlProperty(location=['description'], _type=str)
+    integration = entities.DlProperty(location=['integration'], _type=dict)
+    mandatory = entities.DlProperty(location=['mandatory'], _type=bool)
 
     def __repr__(self):
         # TODO need to move to DlEntity
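A sketch of a `FunctionIO` carrying one of the two new fields (assumes the DlEntity constructor accepts declared-property keyword arguments, as with the existing fields):

    io_spec = dl.FunctionIO(type=dl.PackageInputType.JSON,
                            name='config',
                            mandatory=True)  # new field; `integration` is likewise settable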