dtlpy 1.85.25__py3-none-any.whl → 1.87.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +3 -3
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -1
- dtlpy/entities/annotation.py +10 -12
- dtlpy/entities/annotation_collection.py +11 -9
- dtlpy/entities/annotation_definitions/__init__.py +2 -1
- dtlpy/entities/annotation_definitions/ref_image.py +86 -0
- dtlpy/entities/command.py +1 -1
- dtlpy/entities/dataset.py +4 -8
- dtlpy/entities/feature_set.py +0 -3
- dtlpy/entities/filters.py +12 -2
- dtlpy/entities/item.py +0 -1
- dtlpy/entities/model.py +51 -2
- dtlpy/entities/node.py +14 -5
- dtlpy/entities/ontology.py +2 -2
- dtlpy/entities/package_function.py +3 -0
- dtlpy/entities/pipeline.py +11 -2
- dtlpy/entities/recipe.py +1 -1
- dtlpy/entities/service.py +33 -16
- dtlpy/entities/task.py +18 -1
- dtlpy/entities/trigger.py +7 -1
- dtlpy/ml/base_model_adapter.py +56 -11
- dtlpy/ml/train_utils.py +0 -1
- dtlpy/new_instance.py +5 -3
- dtlpy/repositories/artifacts.py +9 -15
- dtlpy/repositories/codebases.py +2 -14
- dtlpy/repositories/commands.py +6 -7
- dtlpy/repositories/datasets.py +73 -43
- dtlpy/repositories/downloader.py +1 -1
- dtlpy/repositories/feature_sets.py +1 -6
- dtlpy/repositories/models.py +69 -26
- dtlpy/repositories/packages.py +5 -4
- dtlpy/repositories/pipelines.py +5 -4
- dtlpy/repositories/services.py +32 -5
- dtlpy/repositories/tasks.py +8 -3
- dtlpy/repositories/uploader.py +1 -1
- dtlpy/services/api_client.py +2 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +2 -2
- dtlpy/utilities/reports/figures.py +215 -48
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/METADATA +1 -2
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/RECORD +49 -48
- tests/features/environment.py +49 -2
- {dtlpy-1.85.25.data → dtlpy-1.87.18.data}/scripts/dlp +0 -0
- {dtlpy-1.85.25.data → dtlpy-1.87.18.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.85.25.data → dtlpy-1.87.18.data}/scripts/dlp.py +0 -0
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/LICENSE +0 -0
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/WHEEL +0 -0
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.85.25.dist-info → dtlpy-1.87.18.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -69,7 +69,7 @@ from .entities import (
     # annotations
     Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
     Description,
-    Polygon, Text, FreeText,
+    Polygon, Text, FreeText, RefImage,
     # filters
     Filters, FiltersKnownFields, FiltersResource, FiltersOperations, FiltersMethod, FiltersOrderByDirection,
     FiltersKnownFields as KnownFields,
@@ -313,14 +313,14 @@ INSTANCE_CATALOG_REGULAR_XS = InstanceCatalog.REGULAR_XS
 INSTANCE_CATALOG_REGULAR_S = InstanceCatalog.REGULAR_S
 INSTANCE_CATALOG_REGULAR_M = InstanceCatalog.REGULAR_M
 INSTANCE_CATALOG_REGULAR_L = InstanceCatalog.REGULAR_L
-INSTANCE_CATALOG_REGULAR_XL = InstanceCatalog.REGULAR_XL
 INSTANCE_CATALOG_HIGHMEM_XS = InstanceCatalog.HIGHMEM_XS
 INSTANCE_CATALOG_HIGHMEM_S = InstanceCatalog.HIGHMEM_S
 INSTANCE_CATALOG_HIGHMEM_M = InstanceCatalog.HIGHMEM_M
 INSTANCE_CATALOG_HIGHMEM_L = InstanceCatalog.HIGHMEM_L
-INSTANCE_CATALOG_HIGHMEM_XL = InstanceCatalog.HIGHMEM_XL
 INSTANCE_CATALOG_GPU_K80_S = InstanceCatalog.GPU_K80_S
 INSTANCE_CATALOG_GPU_K80_M = InstanceCatalog.GPU_K80_M
+INSTANCE_CATALOG_GPU_T4_S = InstanceCatalog.GPU_T4_S
+INSTANCE_CATALOG_GPU_T4_M = InstanceCatalog.GPU_T4_M
 
 MODALITY_TYPE_OVERLAY = ModalityTypeEnum.OVERLAY
 MODALITY_TYPE_PREVIEW = ModalityTypeEnum.PREVIEW
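The second hunk drops the `REGULAR_XL` and `HIGHMEM_XL` aliases (code referencing `dl.INSTANCE_CATALOG_REGULAR_XL` or `dl.INSTANCE_CATALOG_HIGHMEM_XL` will no longer resolve) and adds T4 GPU aliases. A minimal sketch of referencing the new constants; the commented deployment call is a hypothetical illustration and its `KubernetesRuntime(pod_type=...)` arguments are assumed rather than taken from this diff:

```python
import dtlpy as dl

# New aliases added in this release.
print(dl.INSTANCE_CATALOG_GPU_T4_S)   # InstanceCatalog.GPU_T4_S
print(dl.INSTANCE_CATALOG_GPU_T4_M)   # InstanceCatalog.GPU_T4_M

# Hypothetical service deployment on a T4 pod (runtime arguments assumed, not shown in this diff):
# service = package.services.deploy(
#     service_name='my-service',
#     runtime=dl.KubernetesRuntime(pod_type=dl.InstanceCatalog.GPU_T4_S),
# )
```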
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.85.25'
+version = '1.87.18'
dtlpy/entities/__init__.py
CHANGED
@@ -33,7 +33,7 @@ from .filters import Filters, FiltersKnownFields, FiltersResource, FiltersOperat
 from .recipe import Recipe
 from .ontology import Ontology, AttributesTypes, AttributesRange
 from .annotation_definitions import Box, Cube, Cube3d, Point, Segmentation, Polygon, Ellipse, Classification, \
-    Subtitle, Text, FreeText, \
+    Subtitle, Text, FreeText, RefImage, \
     Polyline, Comparison, UndefinedAnnotationType, Note, Message, Description, Pose
 from .label import Label
 from .codebase import Codebase, PackageCodebaseType, ItemCodebase, GitCodebase, FilesystemCodebase, LocalCodebase
dtlpy/entities/annotation.py
CHANGED
@@ -235,9 +235,7 @@ class Annotation(entities.BaseEntity):
     def coordinates(self):
         color = None
         if self.type in ['binary']:
-            color = self.
-            if color is None:
-                color = self.color
+            color = self.color
         coordinates = self.annotation_definition.to_coordinates(color=color)
         return coordinates
 
@@ -446,16 +444,16 @@ class Annotation(entities.BaseEntity):
     @property
     def color(self):
         # if "dataset" is not in self - this will always get the dataset
-
-
+        try:
+            colors = self.dataset._get_ontology().color_map
+        except (exceptions.BadRequest, exceptions.NotFound):
+            colors = None
+            logger.warning('Cant get dataset for annotation color. using default.')
+        if colors is not None and self.label in colors:
+            color = colors[self.label]
         else:
-
-
-        except exceptions.BadRequest:
-            colors = None
-            logger.warning('Cant get dataset for annotation color. using default.')
-        if colors is not None and self.label in colors:
-            color = colors[self.label]
+            if self.type == 'binary' and self.annotation_definition._color is not None:
+                color = self.annotation_definition._color
         else:
             color = (255, 255, 255)
         return color
dtlpy/entities/annotation_collection.py
CHANGED
@@ -75,23 +75,25 @@ class AnnotationCollection(entities.BaseEntity):
         :param metadata: optional- metadata dictionary for annotation
         :param parent_id: set a parent for this annotation (parent annotation ID)
         :param prompt_id: Connect the annotation with a specific prompt in a dl.PromptItem
-        :param model_info: optional - set model on annotation {'
+        :param model_info: optional - set model on annotation {'confidence':0 [Mandatory], (Float between 0-1)
+                                                                'name',:'' [Optional], ('name' refers to 'model_name')
+                                                                'model_id':''[Optional]}
         :return:
         """
         if model_info is not None:
-            if not isinstance(model_info, dict) or '
-                raise ValueError('"model_info" must be a dict with
+            if not isinstance(model_info, dict) or 'confidence' not in model_info:
+                raise ValueError('"model_info" must be a dict with key: "confidence"')
             if metadata is None:
                 metadata = dict()
             if 'user' not in metadata:
                 metadata['user'] = dict()
-
-
+            confidence = float(model_info['confidence'])
+            if confidence < 0 or confidence > 1:
+                raise ValueError('"confidence" must be a float between 0 and 1')
+            metadata['user']['model'] = {'confidence': confidence,
+                                         'name': model_info.get('name'),
                                          'model_id': model_info.get('model_id'),
-                                         'snapshot_id': model_info.get('snapshot_id'),
                                          }
-            metadata['user']['annotation_type'] = 'prediction'
-
         if prompt_id is not None:
             if metadata is None:
                 metadata = dict()
@@ -512,7 +514,7 @@ class AnnotationCollection(entities.BaseEntity):
 
     @classmethod
     def from_json(cls, _json: list, item=None, is_video=None, fps=25, height=None, width=None,
-                  client_api=None, is_audio=None):
+                  client_api=None, is_audio=None) -> 'AnnotationCollection':
         """
         Create an annotation collection object from platform json
 
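The tightened `model_info` contract above now requires a `confidence` key (a float in [0, 1]); `name` and `model_id` stay optional, and `snapshot_id` is no longer recorded. A minimal sketch of adding a prediction annotation under the new contract; the item id and label are placeholders:

```python
import dtlpy as dl

item = dl.items.get(item_id='my-item-id')  # placeholder id
builder = item.annotations.builder()
builder.add(
    annotation_definition=dl.Classification(label='cat'),
    model_info={
        'confidence': 0.92,         # mandatory, validated to be between 0 and 1
        'name': 'my-model',         # optional
        'model_id': 'my-model-id',  # optional
    },
)
item.annotations.upload(builder)
```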
dtlpy/entities/annotation_definitions/ref_image.py
ADDED
@@ -0,0 +1,86 @@
+from . import BaseAnnotationDefinition
+
+
+class RefImage(BaseAnnotationDefinition):
+    """
+    Create an image annotation. Reference the url or item id in this annotation type
+    """
+    type = "ref_image"
+
+    def __init__(self, ref, ref_type=None, mimetype=None, label='ref-image', attributes=None, description=None):
+
+        """
+        Create an image annotation. Used for generative model and any other algorithm where and image is the output
+
+        For type 'id', need to upload the image as item in the platform and reference the item id in the annotation.
+        For type 'url', mimetype must be provided to load the ref correctly in the platform
+
+        :param str ref: the reference to the image annotation, represented by an ‘itemId’ or ‘url’
+        :param str ref_type: one of ‘id’ | ‘url’
+        :param str mimetype: optional. in case the refType is URL, e.g. image/jpeg, video/mpeg
+        :param label: annotation label
+        :param attributes: annotation attributes
+        :param description:
+
+        :return:
+        """
+        super().__init__(description=description, attributes=attributes)
+        if ref_type is None:
+            if ref.startswith('http'):
+                ref_type = 'url'
+            else:
+                ref_type = 'id'
+        self.ref = ref
+        self.ref_type = ref_type
+        self.mimetype = mimetype
+        self.label = label
+
+    @property
+    def x(self):
+        return 0
+
+    @property
+    def y(self):
+        return 0
+
+    @property
+    def geo(self):
+        return list()
+
+    def show(self, image, thickness, with_text, height, width, annotation_format, color, alpha=1):
+        """
+        Show annotation as ndarray
+        :param image: empty or image to draw on
+        :param thickness:
+        :param with_text: not required
+        :param height: item height
+        :param width: item width
+        :param annotation_format: options: list(dl.ViewAnnotationOptions)
+        :param color: color
+        :param alpha: opacity value [0 1], default 1
+        :return: ndarray
+        """
+        # TODO over or show the image annotations
+        return self.add_text_to_image(image=image, annotation=self)
+
+    def to_coordinates(self, color):
+        coordinates = {
+            "ref": self.ref,
+            "refType": self.ref_type,
+            "mimetype": self.mimetype,
+        }
+        return coordinates
+
+    @classmethod
+    def from_json(cls, _json):
+        coordinates = _json["coordinates"]
+        ref = coordinates.get('ref')
+        ref_type = coordinates.get('refType')
+        mimetype = coordinates.get('mimetype')
+        return cls(
+            ref=ref,
+            ref_type=ref_type,
+            mimetype=mimetype,
+            label=_json["label"],
+            attributes=_json.get("attributes", None),
+        )
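The new `RefImage` definition above is also re-exported at the package root (see the `dtlpy/__init__.py` and `dtlpy/entities/__init__.py` hunks earlier). A minimal usage sketch: per the docstring, a URL reference should carry a `mimetype`, while an item-id reference does not; the item id and URL are placeholders:

```python
import dtlpy as dl

item = dl.items.get(item_id='prompt-item-id')  # placeholder id
builder = item.annotations.builder()

# Reference a generated image by URL; ref_type is inferred as 'url' because the ref starts with 'http'.
builder.add(annotation_definition=dl.RefImage(ref='https://example.com/generated.jpg',
                                              mimetype='image/jpeg'))

# Reference an image that was uploaded to the platform as an item (ref_type inferred as 'id').
builder.add(annotation_definition=dl.RefImage(ref='uploaded-image-item-id'))

item.annotations.upload(builder)
```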
dtlpy/entities/command.py
CHANGED
@@ -141,7 +141,7 @@ class Command(entities.BaseEntity):
                 entities.CommandsStatus.FINALIZING,
                 entities.CommandsStatus.IN_PROGRESS]
 
-    def wait(self, timeout=0, step=None, backoff_factor=
+    def wait(self, timeout=0, step=None, backoff_factor=1):
        """
        Wait for Command to finish
 
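`Command.wait` now defaults `backoff_factor` to 1. A short sketch of polling a long-running command; obtaining the `Command` entity via `dl.commands.get` is an assumption for illustration, since commands are usually returned by long-running operations such as dataset clone or merge:

```python
import dtlpy as dl

command = dl.commands.get(command_id='my-command-id')  # placeholder id, assumed lookup
command = command.wait(timeout=0, backoff_factor=1)    # poll until the command leaves the in-progress states
print(command.status)
```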
dtlpy/entities/dataset.py
CHANGED
@@ -244,9 +244,8 @@ class Dataset(entities.BaseEntity):
 
     @readonly.setter
     def readonly(self, state):
-
-
-                            message='Cannot set attribute readonly. Please use "set_readonly({})" method'.format(state))
+        import warnings
+        warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
 
     @property
     def labels_flat_dict(self):
@@ -518,11 +517,8 @@ class Dataset(entities.BaseEntity):
 
             dataset.set_readonly(state=True)
         """
-
-
-                            error='400',
-                            message='Argument "state" must be bool. input type: {}'.format(type(state)))
-        return self.datasets.set_readonly(dataset=self, state=state)
+        import warnings
+        warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
 
     def clone(self,
               clone_name=None,
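Both the `readonly` setter and `Dataset.set_readonly` are now no-ops that only emit a `DeprecationWarning`. A small sketch of what existing callers will observe (the dataset id is a placeholder):

```python
import warnings
import dtlpy as dl

dataset = dl.datasets.get(dataset_id='my-dataset-id')  # placeholder id

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    dataset.set_readonly(state=True)  # no longer changes anything on the platform

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```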
dtlpy/entities/feature_set.py
CHANGED
@@ -23,7 +23,6 @@ class FeatureSet(entities.BaseEntity):
     # platform
     id = attr.ib()
     name = attr.ib()
-    tags = attr.ib()
     url = attr.ib(repr=False)
     creator = attr.ib(repr=False)
     created_at = attr.ib()
@@ -104,7 +103,6 @@ class FeatureSet(entities.BaseEntity):
             created_at=_json.get('createdAt', None),
             creator=_json.get('creator', None),
             updated_by=_json.get('updatedBy', None),
-            tags=_json.get('tags', None),
             client_api=client_api,
             org_id=_json.get('org', None),
         )
@@ -127,7 +125,6 @@ class FeatureSet(entities.BaseEntity):
                  'createdAt': self.created_at,
                  'updatedBy': self.updated_by,
                  'name': self.name,
-                 'tags': self.tags,
                  'size': self.size,
                  'url': self.url}
        if self.org_id is not None:
dtlpy/entities/filters.py
CHANGED
@@ -33,7 +33,7 @@ class FiltersResource(str, Enum):
     MODEL = "models"
     WEBHOOK = "webhooks"
     RECIPE = 'recipe'
-    DATASET = '
+    DATASET = 'datasets'
     ONTOLOGY = 'ontology'
     TASK = 'tasks'
     PIPELINE = 'pipeline'
@@ -113,6 +113,7 @@ class Filters:
         self._ref_op = None
         self._ref_assignment_id = None
         self._ref_task_id = None
+        self._system_space = None
 
         self._use_defaults = use_defaults
         self.__add_defaults()
@@ -146,6 +147,14 @@ class Filters:
         self.reset()
         self.__add_defaults()
 
+    @property
+    def system_space(self):
+        return self._system_space
+
+    @system_space.setter
+    def system_space(self, val: bool):
+        self._system_space = val
+
     def reset(self):
         self.or_filter_list = list()
         self.and_filter_list = list()
@@ -443,7 +452,8 @@ class Filters:
 
         if self.context is not None:
             _json['context'] = self.context
-
+        if self._system_space is not None:
+            _json['systemSpace'] = self._system_space
         return _json
 
     def sort_by(self, field, value: FiltersOrderByDirection = FiltersOrderByDirection.ASCENDING):
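A minimal sketch of the new `system_space` flag, which is serialized as `systemSpace` in the query payload (assuming the hunk above sits in `Filters.prepare`, which is how the surrounding `context` handling reads). Whether the backend honors the flag for a given resource is outside this diff:

```python
import dtlpy as dl

filters = dl.Filters(resource=dl.FiltersResource.DATASET)  # enum value is now 'datasets'
filters.system_space = True

query = filters.prepare()
print(query.get('systemSpace'))  # -> True
```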
dtlpy/entities/item.py
CHANGED
dtlpy/entities/model.py
CHANGED
@@ -105,6 +105,7 @@ class Model(entities.BaseEntity):
    _client_api = attr.ib(type=ApiClient, repr=False)
    _repositories = attr.ib(repr=False)
    _ontology = attr.ib(repr=False, default=None)
+    updated_by = attr.ib(default=None)
 
    @staticmethod
    def _protected_from_json(_json, client_api, project, package, is_fetched=True):
@@ -184,7 +185,8 @@ class Model(entities.BaseEntity):
            context=_json.get('context', {}),
            input_type=_json.get('inputType', None),
            output_type=_json.get('outputType', None),
-            module_name
+            module_name=_json.get('moduleName', None),
+            updated_by=_json.get('updatedBy', None)
        )
        inst.is_fetched = is_fetched
        return inst
@@ -212,6 +214,7 @@ class Model(entities.BaseEntity):
                                                        attr.fields(Model).updated_at,
                                                        attr.fields(Model).input_type,
                                                        attr.fields(Model).output_type,
+                                                        attr.fields(Model).updated_by
                                                        ))
        _json['packageId'] = self.package_id
        _json['datasetId'] = self.dataset_id
@@ -230,6 +233,10 @@ class Model(entities.BaseEntity):
                artifact = artifact.to_json(as_artifact=True)
            model_artifacts.append(artifact)
        _json['artifacts'] = model_artifacts
+
+        if self.updated_by:
+            _json['updatedBy'] = self.updated_by
+
        return _json
 
    ############
@@ -307,7 +314,9 @@ class Model(entities.BaseEntity):
            dpks=repositories.Dpks(client_api=self._client_api),
            services=repositories.Services(client_api=self._client_api,
                                           project=self._project,
-                                           project_id=self.project_id
+                                           project_id=self.project_id,
+                                           model_id=self.id,
+                                           model=self),
        )
        return r
 
@@ -391,6 +400,46 @@ class Model(entities.BaseEntity):
    ###########
    # methods #
    ###########
+
+    def add_subset(self, subset_name: str, subset_filter: entities.Filters):
+        """
+        Adds a subset for the model, specifying a subset of the model's dataset that could be used for training or
+        validation.
+
+        :param str subset_name: the name of the subset
+        :param dtlpy.entities.Filters subset_filter: the filtering operation that this subset performs in the dataset.
+
+        **Example**
+
+        .. code-block:: python
+
+            model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
+            model.metadata['system']['subsets']
+            {'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
+
+        """
+        self.models.add_subset(self, subset_name, subset_filter)
+
+    def delete_subset(self, subset_name: str):
+        """
+        Removes a subset from the model's metadata.
+
+        :param str subset_name: the name of the subset
+
+        **Example**
+
+        .. code-block:: python
+
+            model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
+            model.metadata['system']['subsets']
+            {'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
+            models.delete_subset(subset_name='train')
+            metadata['system']['subsets']
+            {}
+
+        """
+        self.models.delete_subset(self, subset_name)
+
    def update(self, system_metadata=False):
        """
        Update Models changes to platform
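The new `add_subset` / `delete_subset` helpers record dataset subsets (e.g. train/validation splits) as filters under the model's system metadata and delegate to the models repository, which this release also extends. A short sketch based on the docstring example above; the model lookup is a placeholder:

```python
import dtlpy as dl

model = dl.models.get(model_id='my-model-id')  # placeholder id

# Register train/validation splits as dataset filters.
model.add_subset(subset_name='train',
                 subset_filter=dl.Filters(field='dir', values='/train'))
model.add_subset(subset_name='validation',
                 subset_filter=dl.Filters(field='dir', values='/validation'))
print(model.metadata['system']['subsets'])

# Drop a subset that is no longer needed.
model.delete_subset(subset_name='train')
```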
dtlpy/entities/node.py
CHANGED
@@ -94,7 +94,8 @@ class PipelineNodeIO:
                 action: str = None,
                 default_value=None,
                 variable_name: str = None,
-                 actions: list = None
+                 actions: list = None,
+                 description: str = None):
        """
        Pipeline Node
 
@@ -116,6 +117,7 @@ class PipelineNodeIO:
        self.port_percentage = port_percentage
        self.default_value = default_value
        self.variable_name = variable_name
+        self.description = description
 
        if action is not None:
            warnings.warn('action param has been deprecated in version 1.80', DeprecationWarning)
@@ -141,6 +143,7 @@ class PipelineNodeIO:
            default_value=_json.get('defaultValue', None),
            variable_name=_json.get('variableName', None),
            actions=_json.get('actions', None),
+            description=_json.get('description', None),
        )
 
    def to_json(self):
@@ -158,7 +161,8 @@ class PipelineNodeIO:
            _json['actions'] = self.actions
        if self.default_value:
            _json['defaultValue'] = self.default_value
-
+        if self.description:
+            _json['description'] = self.description
        return _json
 
 
@@ -506,9 +510,9 @@ class CodeNode(PipelineNode):
        :param list inputs: list of PipelineNodeIO inputs
        :param tuple position: tuple of the node place
        """
-        if
+        if inputs is None:
            inputs = [self._default_io()]
-        if
+        if outputs is None:
            outputs = [self._default_io()]
 
        if method is None or not isinstance(method, Callable):
@@ -857,7 +861,12 @@ class FunctionNode(PipelineNode):
        inputs = []
        outputs = []
        package = self.service.package
-
+        modules = []
+        if isinstance(package, entities.Package):
+            modules = package.modules
+        elif isinstance(package, entities.Dpk):
+            modules = package.components.modules
+        for model in modules:
            if model.name == self.service.module_name:
                for func in model.functions:
                    if func.name == function_name:
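`PipelineNodeIO` now carries an optional `description` that is round-tripped through `from_json`/`to_json`. A minimal sketch of declaring a described port; the `input_type`/`name`/`display_name` arguments are assumed from the existing constructor, which is only partially visible in this diff:

```python
import dtlpy as dl

io = dl.PipelineNodeIO(
    input_type=dl.PackageInputType.ITEM,
    name='item',
    display_name='item',
    description='Item the node receives and passes downstream',  # new optional field
)
print(io.to_json().get('description'))
```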
dtlpy/entities/ontology.py
CHANGED
@@ -396,9 +396,9 @@ class Ontology(entities.BaseEntity):
    def _add_image_label(self, icon_path):
        display_data = dict()
        if self.project is not None:
-            dataset = self.project.datasets.
+            dataset = self.project.datasets._get_binaries_dataset()
        elif self.dataset is not None:
-            dataset = self.dataset.project.datasets.
+            dataset = self.dataset.project.datasets._get_binaries_dataset()
        else:
            raise ValueError('must have project or dataset to create with icon path')
        platform_path = "/.dataloop/ontologies/{}/labelDisplayImages/".format(self.id)
dtlpy/entities/package_function.py
CHANGED
@@ -95,6 +95,7 @@ class FunctionIO(entities.DlEntity):
    value = entities.DlProperty(location=['value'], _type=str)
    name = entities.DlProperty(location=['name'], _type=str)
    actions = entities.DlProperty(location=['actions'], _type=list)
+    description = entities.DlProperty(location=['description'], _type=str)
 
    def __repr__(self):
        # TODO need to move to DlEntity
@@ -164,6 +165,8 @@ class FunctionIO(entities.DlEntity):
            }
            if self.actions:
                _json['actions'] = self.actions
+            if self.description:
+                _json['description'] = self.description
        elif resource in ['execution', 'service']:
            _json = {
                self.name: self.value
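`FunctionIO` gains a matching `description` property that is serialized for package/module schemas. A small sketch, assuming the property can be set on an instance after construction (the constructor arguments beyond `type` and `name` are not shown in this diff):

```python
import dtlpy as dl

io = dl.FunctionIO(type=dl.PackageInputType.ITEM, name='item')
io.description = 'Item the function operates on'  # new optional field

# When present, the description is included in the serialized package/module IO definition.
print(io.to_json())
```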
dtlpy/entities/pipeline.py
CHANGED
@@ -8,6 +8,8 @@ from .node import PipelineNode, PipelineConnection, TaskNode, CodeNode, Function
 from .. import repositories, entities
 from ..services.api_client import ApiClient
 from .package_function import PackageInputType
+import copy
+
 logger = logging.getLogger(name='dtlpy')
 
 
@@ -227,6 +229,8 @@ class Pipeline(entities.BaseEntity):
    _original_variables = attr.ib(repr=False, type=List[Variable])
    _repositories = attr.ib(repr=False)
 
+    updated_by = attr.ib(default=None)
+
    @staticmethod
    def _protected_from_json(_json, client_api, project, is_fetched=True):
        """
@@ -270,7 +274,8 @@ class Pipeline(entities.BaseEntity):
        json_variables = _json.get('variables', None) or list()
        variables = list()
        if json_variables:
-
+            copy_json_variables = copy.deepcopy(json_variables)
+            variables = [Variable.from_json(_json=v) for v in copy_json_variables]
 
        settings = PipelineSettings.from_json(_json=_json.get('settings', dict()))
        inst = cls(
@@ -295,6 +300,7 @@ class Pipeline(entities.BaseEntity):
            status=_json.get('status', None),
            original_settings=settings,
            original_variables=json_variables,
+            updated_by=_json.get('updatedBy', None),
        )
        for node in _json.get('nodes', list()):
            inst.nodes.add(node=cls.pipeline_node(node))
@@ -349,7 +355,8 @@ class Pipeline(entities.BaseEntity):
                                                        attr.fields(Pipeline).settings,
                                                        attr.fields(Pipeline).variables,
                                                        attr.fields(Pipeline)._original_settings,
-                                                        attr.fields(Pipeline)._original_variables
+                                                        attr.fields(Pipeline)._original_variables,
+                                                        attr.fields(Pipeline).updated_by,
                                                        ))
 
        _json['projectId'] = self.project_id
@@ -374,6 +381,8 @@ class Pipeline(entities.BaseEntity):
        _json['description'] = self.description
        if self.revisions is not None:
            _json['revisions'] = self.revisions
+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by
 
        return _json
 
dtlpy/entities/recipe.py
CHANGED
@@ -252,7 +252,7 @@ class Recipe(entities.BaseEntity):
                                        message='file Must be pdf')
        for project_id in self.project_ids:
            project = repositories.Projects(client_api=self._client_api).get(project_id=project_id)
-            dataset = project.datasets.
+            dataset = project.datasets._get_binaries_dataset()
            remote_path = '/.dataloop/recipes/{}/instructions'.format(self.id)
            instruction_item = dataset.items.upload(local_path=annotation_instruction_file,
                                                    remote_path=remote_path,