dtlpy 1.86.13__py3-none-any.whl → 1.87.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +2 -2
- dtlpy/__version__.py +1 -1
- dtlpy/entities/annotation.py +10 -12
- dtlpy/entities/annotation_collection.py +0 -2
- dtlpy/entities/command.py +1 -1
- dtlpy/entities/dataset.py +4 -8
- dtlpy/entities/filters.py +12 -2
- dtlpy/entities/model.py +11 -2
- dtlpy/entities/node.py +6 -2
- dtlpy/entities/ontology.py +2 -2
- dtlpy/entities/package_function.py +3 -0
- dtlpy/entities/pipeline.py +7 -1
- dtlpy/entities/recipe.py +1 -1
- dtlpy/entities/service.py +22 -11
- dtlpy/entities/task.py +18 -1
- dtlpy/entities/trigger.py +7 -1
- dtlpy/ml/base_model_adapter.py +8 -10
- dtlpy/ml/train_utils.py +0 -1
- dtlpy/new_instance.py +5 -3
- dtlpy/repositories/artifacts.py +9 -15
- dtlpy/repositories/codebases.py +2 -14
- dtlpy/repositories/commands.py +6 -7
- dtlpy/repositories/datasets.py +73 -43
- dtlpy/repositories/downloader.py +1 -1
- dtlpy/repositories/models.py +7 -21
- dtlpy/repositories/packages.py +5 -4
- dtlpy/repositories/services.py +32 -5
- dtlpy/repositories/tasks.py +1 -1
- dtlpy/repositories/uploader.py +1 -1
- dtlpy/services/api_client.py +1 -1
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/METADATA +1 -1
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/RECORD +39 -39
- {dtlpy-1.86.13.data → dtlpy-1.87.18.data}/scripts/dlp +0 -0
- {dtlpy-1.86.13.data → dtlpy-1.87.18.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.86.13.data → dtlpy-1.87.18.data}/scripts/dlp.py +0 -0
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/LICENSE +0 -0
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/WHEEL +0 -0
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.86.13.dist-info → dtlpy-1.87.18.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -313,14 +313,14 @@ INSTANCE_CATALOG_REGULAR_XS = InstanceCatalog.REGULAR_XS
 INSTANCE_CATALOG_REGULAR_S = InstanceCatalog.REGULAR_S
 INSTANCE_CATALOG_REGULAR_M = InstanceCatalog.REGULAR_M
 INSTANCE_CATALOG_REGULAR_L = InstanceCatalog.REGULAR_L
-INSTANCE_CATALOG_REGULAR_XL = InstanceCatalog.REGULAR_XL
 INSTANCE_CATALOG_HIGHMEM_XS = InstanceCatalog.HIGHMEM_XS
 INSTANCE_CATALOG_HIGHMEM_S = InstanceCatalog.HIGHMEM_S
 INSTANCE_CATALOG_HIGHMEM_M = InstanceCatalog.HIGHMEM_M
 INSTANCE_CATALOG_HIGHMEM_L = InstanceCatalog.HIGHMEM_L
-INSTANCE_CATALOG_HIGHMEM_XL = InstanceCatalog.HIGHMEM_XL
 INSTANCE_CATALOG_GPU_K80_S = InstanceCatalog.GPU_K80_S
 INSTANCE_CATALOG_GPU_K80_M = InstanceCatalog.GPU_K80_M
+INSTANCE_CATALOG_GPU_T4_S = InstanceCatalog.GPU_T4_S
+INSTANCE_CATALOG_GPU_T4_M = InstanceCatalog.GPU_T4_M

 MODALITY_TYPE_OVERLAY = ModalityTypeEnum.OVERLAY
 MODALITY_TYPE_PREVIEW = ModalityTypeEnum.PREVIEW
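The XL sizes disappear from the module-level aliases and two NVIDIA T4 aliases take their place at the end of the GPU group. A quick check from user code (a sketch; requires dtlpy 1.87.18 installed):

    import dtlpy as dl

    # module-level aliases mirror the InstanceCatalog enum
    assert dl.INSTANCE_CATALOG_GPU_T4_M is dl.InstanceCatalog.GPU_T4_M
    # the XL aliases are gone in this release
    assert not hasattr(dl, 'INSTANCE_CATALOG_REGULAR_XL')
    assert not hasattr(dl, 'INSTANCE_CATALOG_HIGHMEM_XL')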
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.86.13'
+version = '1.87.18'
dtlpy/entities/annotation.py
CHANGED
@@ -235,9 +235,7 @@ class Annotation(entities.BaseEntity):
     def coordinates(self):
         color = None
         if self.type in ['binary']:
-            color = self.
-            if color is None:
-                color = self.color
+            color = self.color
         coordinates = self.annotation_definition.to_coordinates(color=color)
         return coordinates

@@ -446,16 +444,16 @@ class Annotation(entities.BaseEntity):
     @property
     def color(self):
         # if "dataset" is not in self - this will always get the dataset
-
-
+        try:
+            colors = self.dataset._get_ontology().color_map
+        except (exceptions.BadRequest, exceptions.NotFound):
+            colors = None
+            logger.warning('Cant get dataset for annotation color. using default.')
+        if colors is not None and self.label in colors:
+            color = colors[self.label]
         else:
-            try:
-                colors = self.dataset._get_ontology().color_map
-            except (exceptions.BadRequest, exceptions.NotFound):
-                colors = None
-                logger.warning('Cant get dataset for annotation color. using default.')
-            if colors is not None and self.label in colors:
-                color = colors[self.label]
+            if self.type == 'binary' and self.annotation_definition._color is not None:
+                color = self.annotation_definition._color
         else:
             color = (255, 255, 255)
         return color
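The net effect is that the ontology's color_map now wins for every annotation type, and a binary mask's own stored color is only consulted as a fallback before defaulting to white. A hedged sketch of the lookup from user code (the item id is a placeholder):

    import dtlpy as dl

    item = dl.items.get(item_id='5f4b...')        # placeholder id
    annotation = item.annotations.list()[0]
    # 1.87 resolution order: color_map[label] -> definition._color (binary) -> (255, 255, 255)
    print(annotation.color)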
dtlpy/entities/annotation_collection.py
CHANGED
@@ -94,8 +94,6 @@ class AnnotationCollection(entities.BaseEntity):
                 'name': model_info.get('name'),
                 'model_id': model_info.get('model_id'),
             }
-            metadata['user']['annotation_type'] = 'prediction'
-
         if prompt_id is not None:
             if metadata is None:
                 metadata = dict()
dtlpy/entities/command.py
CHANGED
@@ -141,7 +141,7 @@ class Command(entities.BaseEntity):
                 entities.CommandsStatus.FINALIZING,
                 entities.CommandsStatus.IN_PROGRESS]

-    def wait(self, timeout=0, step=None, backoff_factor=
+    def wait(self, timeout=0, step=None, backoff_factor=1):
        """
        Wait for Command to finish

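Only the default changed; explicit arguments behave as before. A minimal sketch (the command id is a placeholder):

    import dtlpy as dl

    command = dl.commands.get(command_id='60f0...')   # placeholder id
    # 1.87 polls with backoff_factor=1 by default; pass a smaller factor to poll faster
    command.wait(timeout=60 * 10, backoff_factor=0.5)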
dtlpy/entities/dataset.py
CHANGED
@@ -244,9 +244,8 @@ class Dataset(entities.BaseEntity):

     @readonly.setter
     def readonly(self, state):
-        raise exceptions.PlatformException(
-            error='400',
-            message='Cannot set attribute readonly. Please use "set_readonly({})" method'.format(state))
+        import warnings
+        warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)

     @property
     def labels_flat_dict(self):
@@ -518,11 +517,8 @@ class Dataset(entities.BaseEntity):

         dataset.set_readonly(state=True)
         """
-        if not isinstance(state, bool):
-            raise exceptions.PlatformException(
-                error='400',
-                message='Argument "state" must be bool. input type: {}'.format(type(state)))
-        return self.datasets.set_readonly(dataset=self, state=state)
+        import warnings
+        warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)

     def clone(self,
               clone_name=None,
dtlpy/entities/filters.py
CHANGED
@@ -33,7 +33,7 @@ class FiltersResource(str, Enum):
     MODEL = "models"
     WEBHOOK = "webhooks"
     RECIPE = 'recipe'
-    DATASET = '
+    DATASET = 'datasets'
     ONTOLOGY = 'ontology'
     TASK = 'tasks'
     PIPELINE = 'pipeline'
@@ -113,6 +113,7 @@ class Filters:
         self._ref_op = None
         self._ref_assignment_id = None
         self._ref_task_id = None
+        self._system_space = None

         self._use_defaults = use_defaults
         self.__add_defaults()
@@ -146,6 +147,14 @@ class Filters:
         self.reset()
         self.__add_defaults()

+    @property
+    def system_space(self):
+        return self._system_space
+
+    @system_space.setter
+    def system_space(self, val: bool):
+        self._system_space = val
+
     def reset(self):
         self.or_filter_list = list()
         self.and_filter_list = list()
@@ -443,7 +452,8 @@ class Filters:

         if self.context is not None:
             _json['context'] = self.context
-
+        if self._system_space is not None:
+            _json['systemSpace'] = self._system_space
         return _json

     def sort_by(self, field, value: FiltersOrderByDirection = FiltersOrderByDirection.ASCENDING):
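Assuming the last hunk lives in Filters.prepare() (the method that assembles the query payload), the new flag surfaces like this (a sketch):

    import dtlpy as dl

    filters = dl.Filters(resource=dl.FiltersResource.DATASET)
    filters.system_space = True          # opt in to system-scoped results
    payload = filters.prepare()
    assert payload.get('systemSpace') is True   # key is omitted entirely when unset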
dtlpy/entities/model.py
CHANGED
@@ -105,6 +105,7 @@ class Model(entities.BaseEntity):
     _client_api = attr.ib(type=ApiClient, repr=False)
     _repositories = attr.ib(repr=False)
     _ontology = attr.ib(repr=False, default=None)
+    updated_by = attr.ib(default=None)

     @staticmethod
     def _protected_from_json(_json, client_api, project, package, is_fetched=True):
@@ -184,7 +185,8 @@ class Model(entities.BaseEntity):
                    context=_json.get('context', {}),
                    input_type=_json.get('inputType', None),
                    output_type=_json.get('outputType', None),
-                   module_name=_json.get('moduleName', None)
+                   module_name=_json.get('moduleName', None),
+                   updated_by=_json.get('updatedBy', None)
                    )
         inst.is_fetched = is_fetched
         return inst
@@ -212,6 +214,7 @@ class Model(entities.BaseEntity):
                 attr.fields(Model).updated_at,
                 attr.fields(Model).input_type,
                 attr.fields(Model).output_type,
+                attr.fields(Model).updated_by
             ))
         _json['packageId'] = self.package_id
         _json['datasetId'] = self.dataset_id
@@ -230,6 +233,10 @@ class Model(entities.BaseEntity):
                 artifact = artifact.to_json(as_artifact=True)
                 model_artifacts.append(artifact)
             _json['artifacts'] = model_artifacts
+
+        if self.updated_by:
+            _json['updatedBy'] = self.updated_by
+
         return _json

     ############
@@ -307,7 +314,9 @@ class Model(entities.BaseEntity):
             dpks=repositories.Dpks(client_api=self._client_api),
             services=repositories.Services(client_api=self._client_api,
                                            project=self._project,
-                                           project_id=self.project_id),
+                                           project_id=self.project_id,
+                                           model_id=self.id,
+                                           model=self),
         )
         return r
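updated_by follows the same read/serialize pattern added to Pipeline, Service, and Trigger below: parsed from 'updatedBy', excluded from the attrs dump, and re-emitted only when set. A sketch (the model id is a placeholder):

    import dtlpy as dl

    model = dl.models.get(model_id='646f...')   # placeholder id
    print(model.updated_by)                     # filled from 'updatedBy', or None
    if model.updated_by:
        assert model.to_json()['updatedBy'] == model.updated_by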
dtlpy/entities/node.py
CHANGED
@@ -94,7 +94,8 @@ class PipelineNodeIO:
                  action: str = None,
                  default_value=None,
                  variable_name: str = None,
-                 actions: list = None):
+                 actions: list = None,
+                 description: str = None):
        """
        Pipeline Node

@@ -116,6 +117,7 @@ class PipelineNodeIO:
         self.port_percentage = port_percentage
         self.default_value = default_value
         self.variable_name = variable_name
+        self.description = description

         if action is not None:
             warnings.warn('action param has been deprecated in version 1.80', DeprecationWarning)
@@ -141,6 +143,7 @@ class PipelineNodeIO:
             default_value=_json.get('defaultValue', None),
             variable_name=_json.get('variableName', None),
             actions=_json.get('actions', None),
+            description=_json.get('description', None),
         )

     def to_json(self):
@@ -158,7 +161,8 @@ class PipelineNodeIO:
             _json['actions'] = self.actions
         if self.default_value:
             _json['defaultValue'] = self.default_value
-
+        if self.description:
+            _json['description'] = self.description
         return _json

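A hedged construction sketch; the required arguments follow common PipelineNodeIO usage and may differ:

    import dtlpy as dl

    io = dl.PipelineNodeIO(input_type=dl.PackageInputType.ITEM,
                           name='item',
                           display_name='item',
                           description='the item this port carries')   # new field
    assert io.to_json().get('description') == 'the item this port carries'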
dtlpy/entities/ontology.py
CHANGED
@@ -396,9 +396,9 @@ class Ontology(entities.BaseEntity):
     def _add_image_label(self, icon_path):
         display_data = dict()
         if self.project is not None:
-            dataset = self.project.datasets.
+            dataset = self.project.datasets._get_binaries_dataset()
         elif self.dataset is not None:
-            dataset = self.dataset.project.datasets.
+            dataset = self.dataset.project.datasets._get_binaries_dataset()
         else:
             raise ValueError('must have project or dataset to create with icon path')
         platform_path = "/.dataloop/ontologies/{}/labelDisplayImages/".format(self.id)
dtlpy/entities/package_function.py
CHANGED
@@ -95,6 +95,7 @@ class FunctionIO(entities.DlEntity):
     value = entities.DlProperty(location=['value'], _type=str)
     name = entities.DlProperty(location=['name'], _type=str)
     actions = entities.DlProperty(location=['actions'], _type=list)
+    description = entities.DlProperty(location=['description'], _type=str)

     def __repr__(self):
         # TODO need to move to DlEntity
@@ -164,6 +165,8 @@ class FunctionIO(entities.DlEntity):
             }
             if self.actions:
                 _json['actions'] = self.actions
+            if self.description:
+                _json['description'] = self.description
         elif resource in ['execution', 'service']:
             _json = {
                 self.name: self.value
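FunctionIO gains the same optional field, serialized only on the package-style branch of to_json(). A sketch (assuming to_json() defaults to the package resource):

    import dtlpy as dl

    inpt = dl.FunctionIO(type=dl.PackageInputType.ITEM,
                         name='item',
                         description='the item to run on')   # new field
    print(inpt.to_json())   # 'description' appears only when truthy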
dtlpy/entities/pipeline.py
CHANGED
@@ -229,6 +229,8 @@ class Pipeline(entities.BaseEntity):
     _original_variables = attr.ib(repr=False, type=List[Variable])
     _repositories = attr.ib(repr=False)

+    updated_by = attr.ib(default=None)
+
     @staticmethod
     def _protected_from_json(_json, client_api, project, is_fetched=True):
         """
@@ -298,6 +300,7 @@ class Pipeline(entities.BaseEntity):
                    status=_json.get('status', None),
                    original_settings=settings,
                    original_variables=json_variables,
+                   updated_by=_json.get('updatedBy', None),
                    )
         for node in _json.get('nodes', list()):
             inst.nodes.add(node=cls.pipeline_node(node))
@@ -352,7 +355,8 @@ class Pipeline(entities.BaseEntity):
                 attr.fields(Pipeline).settings,
                 attr.fields(Pipeline).variables,
                 attr.fields(Pipeline)._original_settings,
-                attr.fields(Pipeline)._original_variables
+                attr.fields(Pipeline)._original_variables,
+                attr.fields(Pipeline).updated_by,
             ))

         _json['projectId'] = self.project_id
@@ -377,6 +381,8 @@ class Pipeline(entities.BaseEntity):
             _json['description'] = self.description
         if self.revisions is not None:
             _json['revisions'] = self.revisions
+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by

         return _json

dtlpy/entities/recipe.py
CHANGED
@@ -252,7 +252,7 @@ class Recipe(entities.BaseEntity):
                                             message='file Must be pdf')
         for project_id in self.project_ids:
             project = repositories.Projects(client_api=self._client_api).get(project_id=project_id)
-            dataset = project.datasets.
+            dataset = project.datasets._get_binaries_dataset()
             remote_path = '/.dataloop/recipes/{}/instructions'.format(self.id)
             instruction_item = dataset.items.upload(local_path=annotation_instruction_file,
                                                     remote_path=remote_path,
dtlpy/entities/service.py
CHANGED
@@ -80,8 +80,6 @@ class InstanceCatalog(str, Enum):
          - regular pod with medium size
        * - REGULAR_L
          - regular pod with large size
-       * - REGULAR_XL
-         - regular pod with extra large size
        * - HIGHMEM_XS
          - highmem pod with extra small size
        * - HIGHMEM_S
@@ -90,25 +88,27 @@ class InstanceCatalog(str, Enum):
          - highmem pod with medium size
        * - HIGHMEM_L
          - highmem pod with large size
-       * - HIGHMEM_XL
-         - highmem pod with extra large size
        * - GPU_K80_S
-         - GPU pod with small size
+         - GPU NVIDIA K80 pod with small size
        * - GPU_K80_M
-         - GPU pod with medium size
+         - GPU NVIDIA K80 pod with medium size
+       * - GPU_T4_S
+         - GPU NVIDIA T4 pod with regular memory
+       * - GPU_T4_M
+         - GPU NVIDIA T4 pod with highmem
    """
    REGULAR_XS = "regular-xs"
    REGULAR_S = "regular-s"
    REGULAR_M = "regular-m"
    REGULAR_L = "regular-l"
-    REGULAR_XL = "regular-xl"
    HIGHMEM_XS = "highmem-xs"
    HIGHMEM_S = "highmem-s"
    HIGHMEM_M = "highmem-m"
    HIGHMEM_L = "highmem-l"
-    HIGHMEM_XL = "highmem-xl"
    GPU_K80_S = "gpu-k80-s"
    GPU_K80_M = "gpu-k80-m"
+    GPU_T4_S = "gpu-t4"
+    GPU_T4_M = "gpu-t4-m"


class RuntimeType(str, Enum):
@@ -223,6 +223,7 @@ class Service(entities.BaseEntity):
    max_attempts = attr.ib()
    mode = attr.ib(repr=False)
    metadata = attr.ib()
+    archive = attr.ib(repr=False)

    # SDK
    _package = attr.ib(repr=False)
@@ -231,6 +232,7 @@ class Service(entities.BaseEntity):
    # repositories
    _project = attr.ib(default=None, repr=False)
    _repositories = attr.ib(repr=False)
+    updated_by = attr.ib(default=None)

    @property
    def createdAt(self):
@@ -265,7 +267,7 @@ class Service(entities.BaseEntity):
        return status, service

    @classmethod
-    def from_json(cls, _json: dict, client_api: ApiClient=None, package=None, project=None, is_fetched=True):
+    def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
        """
        Build a service entity object from a json

@@ -325,7 +327,9 @@ class Service(entities.BaseEntity):
                   secrets=_json.get("secrets", None),
                   type=_json.get("type", None),
                   mode=_json.get('mode', dict()),
-                  metadata=_json.get('metadata', None)
+                  metadata=_json.get('metadata', None),
+                  archive=_json.get('archive', None),
+                  updated_by=_json.get('updatedBy', None)
                   )
        inst.is_fetched = is_fetched
        return inst
@@ -449,7 +453,9 @@ class Service(entities.BaseEntity):
                attr.fields(Service).secrets,
                attr.fields(Service)._type,
                attr.fields(Service).mode,
-                attr.fields(Service).metadata
+                attr.fields(Service).metadata,
+                attr.fields(Service).archive,
+                attr.fields(Service).updated_by,
            )
        )

@@ -469,6 +475,9 @@ class Service(entities.BaseEntity):
        _json['createdAt'] = self.created_at
        _json['updatedAt'] = self.updated_at

+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by
+
        if self.max_attempts is not None:
            _json['maxAttempts'] = self.max_attempts

@@ -493,6 +502,8 @@ class Service(entities.BaseEntity):
        if self.metadata:
            _json['metadata'] = self.metadata

+        if self.archive:
+            _json['archive'] = self.archive
        return _json

    def update(self, force=False):
CHANGED
|
@@ -92,6 +92,7 @@ class Task:
|
|
|
92
92
|
available_actions = attr.ib()
|
|
93
93
|
total_items = attr.ib()
|
|
94
94
|
priority = attr.ib()
|
|
95
|
+
_description = attr.ib()
|
|
95
96
|
|
|
96
97
|
# sdk
|
|
97
98
|
_client_api = attr.ib(repr=False)
|
|
@@ -102,6 +103,18 @@ class Task:
|
|
|
102
103
|
_tasks = attr.ib(default=None, repr=False)
|
|
103
104
|
_settings = attr.ib(default=None, repr=False)
|
|
104
105
|
|
|
106
|
+
@property
|
|
107
|
+
def description(self):
|
|
108
|
+
return self._description
|
|
109
|
+
|
|
110
|
+
@description.setter
|
|
111
|
+
def description(self, description):
|
|
112
|
+
if not isinstance(description, str):
|
|
113
|
+
raise ValueError('description should be a string')
|
|
114
|
+
if self._description is None:
|
|
115
|
+
self._description = {}
|
|
116
|
+
self._description['content'] = description
|
|
117
|
+
|
|
105
118
|
@staticmethod
|
|
106
119
|
def _protected_from_json(_json, client_api, project, dataset):
|
|
107
120
|
"""
|
|
@@ -176,7 +189,8 @@ class Task:
|
|
|
176
189
|
created_at=_json.get('createdAt', None),
|
|
177
190
|
available_actions=actions,
|
|
178
191
|
total_items=_json.get('totalItems', None),
|
|
179
|
-
priority=_json.get('priority', None)
|
|
192
|
+
priority=_json.get('priority', None),
|
|
193
|
+
description=_json.get('description', None)
|
|
180
194
|
)
|
|
181
195
|
|
|
182
196
|
def to_json(self):
|
|
@@ -208,6 +222,7 @@ class Task:
|
|
|
208
222
|
attr.fields(Task).created_at,
|
|
209
223
|
attr.fields(Task).total_items,
|
|
210
224
|
attr.fields(Task)._settings,
|
|
225
|
+
attr.fields(Task)._description
|
|
211
226
|
)
|
|
212
227
|
)
|
|
213
228
|
_json['projectId'] = self.project_id
|
|
@@ -217,6 +232,7 @@ class Task:
|
|
|
217
232
|
_json['dueDate'] = self.due_date
|
|
218
233
|
_json['totalItems'] = self.total_items
|
|
219
234
|
_json['forReview'] = self.for_review
|
|
235
|
+
_json['description'] = self.description
|
|
220
236
|
|
|
221
237
|
if self.available_actions is not None:
|
|
222
238
|
_json['availableActions'] = [action.to_json() for action in self.available_actions]
|
|
@@ -472,3 +488,4 @@ class Task:
|
|
|
472
488
|
:rtype: bool
|
|
473
489
|
"""
|
|
474
490
|
return self.tasks.set_status(status=status, operation=operation, item_ids=item_ids, task_id=self.id)
|
|
491
|
+
|
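The new property is asymmetric as written: the setter wraps the string as {'content': ...} while the getter returns whatever is stored. A sketch (the task id is a placeholder):

    import dtlpy as dl

    task = dl.tasks.get(task_id='63e1...')       # placeholder id
    task.description = 'annotate the new batch'  # stored as {'content': '...'}
    print(task.description)                      # returns the stored dict, not the string
    task.update()                                # to_json() now carries 'description'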
dtlpy/entities/trigger.py
CHANGED
@@ -84,6 +84,8 @@ class BaseTrigger(entities.BaseEntity):
    _op_type = attr.ib(default='service')
    _repositories = attr.ib(repr=False)

+    updated_by = attr.ib(default=None)
+
    @staticmethod
    def _get_operation(operation):
        op_type = operation.get('type', None)
@@ -243,6 +245,7 @@ class BaseTrigger(entities.BaseEntity):
                attr.fields(BaseTrigger).created_at,
                attr.fields(BaseTrigger).updated_at,
                attr.fields(BaseTrigger).operation,
+                attr.fields(BaseTrigger).updated_by,
            ))

        # rename
@@ -251,6 +254,8 @@ class BaseTrigger(entities.BaseEntity):
        _json['updatedAt'] = self.updated_at
        if self.is_global is not None:
            _json['global'] = self.is_global
+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by
        return _json

    def delete(self):
@@ -342,7 +347,8 @@ class Trigger(BaseTrigger):
                   op_type=operation.get('type', None),
                   spec=spec,
                   pipeline_id=pipeline_id,
-                   operation=operation
+                   operation=operation,
+                   updated_by=_json.get('updatedBy', None),
                   )

dtlpy/ml/base_model_adapter.py
CHANGED
@@ -1,10 +1,10 @@
-import copy
 import tempfile
 import datetime
 import logging
 import shutil
 import base64
 import tqdm
+import sys
 import io
 import os
 from PIL import Image
@@ -309,8 +309,6 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         if cleanup:
             shutil.rmtree(path=local_path, ignore_errors=True)
             self.logger.info("Clean-up. deleting {}".format(local_path))
-        self.model_entity.status = 'trained'
-        self.model_entity = self.model_entity.update()

    # ===============
    # SERVICE METHODS
@@ -339,7 +337,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
        pool = ThreadPoolExecutor(max_workers=16)

        annotations = list()
-        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None):
+        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None, file=sys.stdout):
            batch_items = items[i_batch: i_batch + batch_size]
            batch = list(pool.map(self.prepare_item_func, batch_items))
            batch_collections = self.predict(batch, **kwargs)
@@ -415,12 +413,11 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                  cleanup=False,
                  progress: utilities.Progress = None,
                  context: utilities.Context = None):
-        # FROM PARENT
        """
-
-
-
-
+        Train on existing model.
+        data will be taken from dl.Model.datasetId
+        configuration is as defined in dl.Model.configuration
+        upload the output the model's bucket (model.bucket)
        """
        if isinstance(model, dict):
            model = repositories.Models(client_api=self._client_api).get(model_id=model['id'])
@@ -469,7 +466,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                             progress=99)

        self.save_to_model(local_path=output_path, replace=True)
-
+        model.status = 'trained'
+        model.update()
        ###########
        # cleanup #
        ###########
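Routing tqdm to sys.stdout keeps progress lines in the stream that service logs normally capture, and the 'trained' status flip now happens at the end of the train flow rather than inside the save path. The tqdm change in isolation:

    import sys
    import tqdm

    # progress goes to stdout, as the adapter's predict loop now does
    for _ in tqdm.tqdm(range(3), desc='predicting', unit='bt', leave=None, file=sys.stdout):
        pass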
dtlpy/ml/train_utils.py
CHANGED
dtlpy/new_instance.py
CHANGED
@@ -166,14 +166,16 @@ class Dtlpy:
         REGULAR_S = 'regular-s'
         REGULAR_M = 'regular-m'
         REGULAR_L = 'regular-l'
-        REGULAR_XL = 'regular-xl'
         HIGHMEM_MICRO = 'highmem-micro'
         HIGHMEM_XS = 'highmem-xs'
         HIGHMEM_S = 'highmem-s'
         HIGHMEM_M = 'highmem-m'
         HIGHMEM_L = 'highmem-l'
-
-
+        GPU_K80_S = "gpu-k80-s"
+        GPU_K80_M = "gpu-k80-m"
+        GPU_T4_S = "gpu-t4-s"
+        GPU_T4_M = "gpu-t4-m"
+

     class LoggingLevel:
         DEBUG = 'debug'
dtlpy/repositories/artifacts.py
CHANGED
@@ -22,7 +22,7 @@ class Artifacts:
                  project_id: str = None,
                  model: entities.Model = None,
                  package: entities.Package = None,
-                 dataset_name=
+                 dataset_name=None):
         self._client_api = client_api
         self._project = project
         self._dataset = dataset
@@ -40,21 +40,15 @@ class Artifacts:
         if self._dataset is None:
             # get dataset from project
             try:
-                self._dataset = self.project.datasets.get(dataset_name=self.dataset_name)
+                if self.dataset_name is None:
+                    self.dataset_name = 'Binaries'
+                    self._dataset = self.project.datasets._get_binaries_dataset()
+                else:
+                    self._dataset = self.project.datasets.get(dataset_name=self.dataset_name)
             except exceptions.NotFound:
-                self._dataset = None
-            if self._dataset is None:
-                logger.debug(
-                    'Dataset for artifacts was not found. Creating... dataset name: {ds!r}. project_id={id}'.format(
-                        ds=self.dataset_name, id=self.project.id))
-                self._dataset = self.project.datasets.create(dataset_name=self.dataset_name)
-                # add system to metadata
-                if 'metadata' not in self._dataset.to_json():
-                    self._dataset.metadata = dict()
-                if 'system' not in self._dataset.metadata:
-                    self._dataset.metadata['system'] = dict()
-                self._dataset.metadata['system']['scope'] = 'system'
-                self.project.datasets.update(dataset=self._dataset, system_metadata=True)
+                raise ValueError(
+                    f'Missing "{self.dataset_name}" dataset in the project. Please contact support for help')

         return self._dataset

     @property
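The repository no longer creates the binaries dataset (with system-scope metadata) on demand; a missing dataset is now a hard error. Observable from user code (the project name is a placeholder):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')   # placeholder
    try:
        binaries = project.artifacts.dataset   # resolves the artifacts dataset
    except ValueError as err:
        # 1.86 silently created the dataset; 1.87.18 fails fast instead
        print(err)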
dtlpy/repositories/codebases.py
CHANGED
@@ -65,21 +65,9 @@ class Codebases:
         if self._dataset is None:
             # get dataset from project
             try:
-                self._dataset = self.project.datasets.
+                self._dataset = self.project.datasets._get_binaries_dataset()
             except exceptions.NotFound:
-                self._dataset = None
-            if self._dataset is None:
-                logger.debug(
-                    'Dataset for codebases was not found. Creating... dataset name: "Binaries". project_id={}'.format(
-                        self.project.id))
-                self._dataset = self.project.datasets.create(dataset_name='Binaries')
-                # add system to metadata
-                if 'metadata' not in self._dataset.to_json():
-                    self._dataset.metadata = dict()
-                if 'system' not in self._dataset.metadata:
-                    self._dataset.metadata['system'] = dict()
-                self._dataset.metadata['system']['scope'] = 'system'
-                self.project.datasets.update(dataset=self._dataset, system_metadata=True)
+                raise ValueError('Missing "Binaries" dataset in the project. Please contact support for help')
         assert isinstance(self._dataset, entities.Dataset)
         return self._dataset