dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +488 -488
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +311 -311
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +296 -296
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +442 -442
- dtlpy/entities/dataset.py +1285 -1285
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +222 -222
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +641 -641
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +953 -953
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +694 -691
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +803 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +290 -267
- dtlpy/entities/pipeline.py +593 -593
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +499 -499
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +958 -958
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +491 -491
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +945 -940
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -348
- dtlpy/repositories/commands.py +158 -158
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +434 -406
- dtlpy/repositories/datasets.py +1291 -1291
- dtlpy/repositories/downloader.py +895 -895
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +266 -266
- dtlpy/repositories/executions.py +817 -817
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +238 -238
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +909 -915
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +877 -867
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +448 -448
- dtlpy/repositories/pipelines.py +642 -642
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +399 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1124 -1124
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +651 -651
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1782 -1782
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +264 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
- dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
- {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
- dtlpy-1.114.13.dist-info/RECORD +240 -0
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
- tests/features/environment.py +551 -550
- dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
- dtlpy-1.113.10.dist-info/RECORD +0 -244
- tests/assets/__init__.py +0 -0
- tests/assets/models_flow/__init__.py +0 -0
- tests/assets/models_flow/failedmain.py +0 -52
- tests/assets/models_flow/main.py +0 -62
- tests/assets/models_flow/main_model.py +0 -54
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
dtlpy/entities/model.py
CHANGED
|
@@ -1,691 +1,694 @@
|
|
|
1
|
-
from collections import namedtuple
|
|
2
|
-
from enum import Enum
|
|
3
|
-
import traceback
|
|
4
|
-
import logging
|
|
5
|
-
import attr
|
|
6
|
-
from .. import repositories, entities
|
|
7
|
-
from ..services.api_client import ApiClient
|
|
8
|
-
|
|
9
|
-
logger = logging.getLogger(name='dtlpy')
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class DatasetSubsetType(str, Enum):
|
|
13
|
-
"""Available types for dataset subsets"""
|
|
14
|
-
TRAIN = 'train'
|
|
15
|
-
VALIDATION = 'validation'
|
|
16
|
-
TEST = 'test'
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
class ModelStatus(str, Enum):
|
|
20
|
-
"""Available types for model status"""
|
|
21
|
-
CREATED = "created",
|
|
22
|
-
PRE_TRAINED = "pre-trained",
|
|
23
|
-
PENDING = "pending",
|
|
24
|
-
TRAINING = "training",
|
|
25
|
-
TRAINED = "trained",
|
|
26
|
-
DEPLOYED = "deployed",
|
|
27
|
-
FAILED = "failed",
|
|
28
|
-
CLONING = "cloning"
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
class PlotSample:
|
|
32
|
-
def __init__(self, figure, legend, x, y):
|
|
33
|
-
"""
|
|
34
|
-
Create a single metric sample for Model
|
|
35
|
-
|
|
36
|
-
:param figure: figure name identifier
|
|
37
|
-
:param legend: line name identifier
|
|
38
|
-
:param x: x value for the current sample
|
|
39
|
-
:param y: y value for the current sample
|
|
40
|
-
"""
|
|
41
|
-
self.figure = figure
|
|
42
|
-
self.legend = legend
|
|
43
|
-
self.x = x
|
|
44
|
-
self.y = y
|
|
45
|
-
|
|
46
|
-
def to_json(self) -> dict:
|
|
47
|
-
_json = {'figure': self.figure,
|
|
48
|
-
'legend': self.legend,
|
|
49
|
-
'data': {'x': self.x,
|
|
50
|
-
'y': self.y}}
|
|
51
|
-
return _json
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
# class MatrixSample:
|
|
55
|
-
# def __init__(self, figure, legend, x, y):
|
|
56
|
-
# """
|
|
57
|
-
# Create a single metric sample for Model
|
|
58
|
-
#
|
|
59
|
-
# :param figure: figure name identifier
|
|
60
|
-
# :param legend: line name identifier
|
|
61
|
-
# :param x: x value for the current sample
|
|
62
|
-
# :param y: y value for the current sample
|
|
63
|
-
# """
|
|
64
|
-
# self.figure = figure
|
|
65
|
-
# self.legend = legend
|
|
66
|
-
# self.x = x
|
|
67
|
-
# self.y = y
|
|
68
|
-
#
|
|
69
|
-
# def to_json(self) -> dict:
|
|
70
|
-
# _json = {'figure': self.figure,
|
|
71
|
-
# 'legend': self.legend,
|
|
72
|
-
# 'data': {'x': self.x,
|
|
73
|
-
# 'y': self.y}}
|
|
74
|
-
# return _json
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
@attr.s
|
|
78
|
-
class Model(entities.BaseEntity):
|
|
79
|
-
"""
|
|
80
|
-
Model object
|
|
81
|
-
"""
|
|
82
|
-
# platform
|
|
83
|
-
id = attr.ib()
|
|
84
|
-
creator = attr.ib()
|
|
85
|
-
created_at = attr.ib()
|
|
86
|
-
updated_at = attr.ib(repr=False)
|
|
87
|
-
model_artifacts = attr.ib()
|
|
88
|
-
name = attr.ib()
|
|
89
|
-
description = attr.ib()
|
|
90
|
-
ontology_id = attr.ib(repr=False)
|
|
91
|
-
labels = attr.ib()
|
|
92
|
-
status = attr.ib()
|
|
93
|
-
tags = attr.ib()
|
|
94
|
-
configuration = attr.ib()
|
|
95
|
-
metadata = attr.ib()
|
|
96
|
-
input_type = attr.ib()
|
|
97
|
-
output_type = attr.ib()
|
|
98
|
-
module_name = attr.ib()
|
|
99
|
-
|
|
100
|
-
url = attr.ib()
|
|
101
|
-
scope = attr.ib()
|
|
102
|
-
version = attr.ib()
|
|
103
|
-
context = attr.ib()
|
|
104
|
-
status_logs = attr.ib()
|
|
105
|
-
|
|
106
|
-
# name change
|
|
107
|
-
package_id = attr.ib(repr=False)
|
|
108
|
-
project_id = attr.ib()
|
|
109
|
-
dataset_id = attr.ib(repr=False)
|
|
110
|
-
|
|
111
|
-
# sdk
|
|
112
|
-
_project = attr.ib(repr=False)
|
|
113
|
-
_package = attr.ib(repr=False)
|
|
114
|
-
_dataset = attr.ib(repr=False)
|
|
115
|
-
_feature_set = attr.ib(repr=False)
|
|
116
|
-
_client_api = attr.ib(type=ApiClient, repr=False)
|
|
117
|
-
_repositories = attr.ib(repr=False)
|
|
118
|
-
_ontology = attr.ib(repr=False, default=None)
|
|
119
|
-
updated_by = attr.ib(default=None)
|
|
120
|
-
app = attr.ib(default=None)
|
|
121
|
-
|
|
122
|
-
@staticmethod
|
|
123
|
-
def _protected_from_json(_json, client_api, project=None, package=None, is_fetched=True):
|
|
124
|
-
"""
|
|
125
|
-
Same as from_json but with try-except to catch if error
|
|
126
|
-
|
|
127
|
-
:param _json: platform representation of Model
|
|
128
|
-
:param client_api: ApiClient entity
|
|
129
|
-
:param project: project that owns the model
|
|
130
|
-
:param package: package entity of the model
|
|
131
|
-
:param is_fetched: is Entity fetched from Platform
|
|
132
|
-
:return: Model entity
|
|
133
|
-
"""
|
|
134
|
-
try:
|
|
135
|
-
model = Model.from_json(_json=_json,
|
|
136
|
-
client_api=client_api,
|
|
137
|
-
project=project,
|
|
138
|
-
package=package,
|
|
139
|
-
is_fetched=is_fetched)
|
|
140
|
-
status = True
|
|
141
|
-
except Exception:
|
|
142
|
-
model = traceback.format_exc()
|
|
143
|
-
status = False
|
|
144
|
-
return status, model
|
|
145
|
-
|
|
146
|
-
@classmethod
|
|
147
|
-
def from_json(cls, _json, client_api, project=None, package=None, is_fetched=True):
|
|
148
|
-
"""
|
|
149
|
-
Turn platform representation of model into a model entity
|
|
150
|
-
|
|
151
|
-
:param _json: platform representation of model
|
|
152
|
-
:param client_api: ApiClient entity
|
|
153
|
-
:param project: project that owns the model
|
|
154
|
-
:param package: package entity of the model
|
|
155
|
-
:param is_fetched: is Entity fetched from Platform
|
|
156
|
-
:return: Model entity
|
|
157
|
-
"""
|
|
158
|
-
if project is not None:
|
|
159
|
-
if project.id != _json.get('context', {}).get('project', None):
|
|
160
|
-
logger.warning("Model's project is different then the input project")
|
|
161
|
-
project = None
|
|
162
|
-
|
|
163
|
-
if package is not None:
|
|
164
|
-
if package.id != _json.get('packageId', None):
|
|
165
|
-
logger.warning("Model's package is different then the input package")
|
|
166
|
-
package = None
|
|
167
|
-
|
|
168
|
-
model_artifacts = [entities.Artifact.from_json(_json=artifact,
|
|
169
|
-
client_api=client_api,
|
|
170
|
-
project=project)
|
|
171
|
-
for artifact in _json.get('artifacts', list())]
|
|
172
|
-
|
|
173
|
-
inst = cls(
|
|
174
|
-
configuration=_json.get('configuration', None),
|
|
175
|
-
description=_json.get('description', None),
|
|
176
|
-
status=_json.get('status', None),
|
|
177
|
-
tags=_json.get('tags', None),
|
|
178
|
-
metadata=_json.get('metadata', dict()),
|
|
179
|
-
project_id=_json.get('context', {}).get('project', None),
|
|
180
|
-
dataset_id=_json.get('datasetId', None),
|
|
181
|
-
package_id=_json.get('packageId', None),
|
|
182
|
-
model_artifacts=model_artifacts,
|
|
183
|
-
labels=_json.get('labels', None),
|
|
184
|
-
ontology_id=_json.get('ontology_id', None),
|
|
185
|
-
created_at=_json.get('createdAt', None),
|
|
186
|
-
updated_at=_json.get('updatedAt', None),
|
|
187
|
-
creator=_json.get('context', {}).get('creator', None),
|
|
188
|
-
client_api=client_api,
|
|
189
|
-
name=_json.get('name', None),
|
|
190
|
-
project=project,
|
|
191
|
-
package=package,
|
|
192
|
-
dataset=None,
|
|
193
|
-
feature_set=None,
|
|
194
|
-
id=_json.get('id', None),
|
|
195
|
-
url=_json.get('url', None),
|
|
196
|
-
scope=_json.get('scope', entities.EntityScopeLevel.PROJECT),
|
|
197
|
-
version=_json.get('version', '1.0.0'),
|
|
198
|
-
context=_json.get('context', {}),
|
|
199
|
-
input_type=_json.get('inputType', None),
|
|
200
|
-
output_type=_json.get('outputType', None),
|
|
201
|
-
module_name=_json.get('moduleName', None),
|
|
202
|
-
updated_by=_json.get('updatedBy', None),
|
|
203
|
-
app=_json.get('app', None),
|
|
204
|
-
status_logs=_json.get('statusLogs', []),
|
|
205
|
-
)
|
|
206
|
-
inst.is_fetched = is_fetched
|
|
207
|
-
return inst
|
|
208
|
-
|
|
209
|
-
def to_json(self):
|
|
210
|
-
"""
|
|
211
|
-
Get the dict of Model
|
|
212
|
-
|
|
213
|
-
:return: platform json of model
|
|
214
|
-
:rtype: dict
|
|
215
|
-
"""
|
|
216
|
-
_json = attr.asdict(self,
|
|
217
|
-
filter=attr.filters.exclude(attr.fields(Model)._project,
|
|
218
|
-
attr.fields(Model)._package,
|
|
219
|
-
attr.fields(Model)._dataset,
|
|
220
|
-
attr.fields(Model)._ontology,
|
|
221
|
-
attr.fields(Model)._repositories,
|
|
222
|
-
attr.fields(Model)._feature_set,
|
|
223
|
-
attr.fields(Model)._client_api,
|
|
224
|
-
attr.fields(Model).package_id,
|
|
225
|
-
attr.fields(Model).project_id,
|
|
226
|
-
attr.fields(Model).dataset_id,
|
|
227
|
-
attr.fields(Model).ontology_id,
|
|
228
|
-
attr.fields(Model).model_artifacts,
|
|
229
|
-
attr.fields(Model).created_at,
|
|
230
|
-
attr.fields(Model).updated_at,
|
|
231
|
-
attr.fields(Model).input_type,
|
|
232
|
-
attr.fields(Model).output_type,
|
|
233
|
-
attr.fields(Model).updated_by,
|
|
234
|
-
attr.fields(Model).app,
|
|
235
|
-
attr.fields(Model).status_logs
|
|
236
|
-
))
|
|
237
|
-
_json['packageId'] = self.package_id
|
|
238
|
-
_json['datasetId'] = self.dataset_id
|
|
239
|
-
_json['createdAt'] = self.created_at
|
|
240
|
-
_json['updatedAt'] = self.updated_at
|
|
241
|
-
_json['inputType'] = self.input_type
|
|
242
|
-
_json['outputType'] = self.output_type
|
|
243
|
-
_json['moduleName'] = self.module_name
|
|
244
|
-
|
|
245
|
-
model_artifacts = list()
|
|
246
|
-
for artifact in self.model_artifacts:
|
|
247
|
-
if artifact.type in ['file', 'dir']:
|
|
248
|
-
artifact = {'type': 'item',
|
|
249
|
-
'itemId': artifact.id}
|
|
250
|
-
else:
|
|
251
|
-
artifact = artifact.to_json(as_artifact=True)
|
|
252
|
-
model_artifacts.append(artifact)
|
|
253
|
-
_json['artifacts'] = model_artifacts
|
|
254
|
-
|
|
255
|
-
if self.updated_by:
|
|
256
|
-
_json['updatedBy'] = self.updated_by
|
|
257
|
-
if self.app:
|
|
258
|
-
_json['app'] = self.app
|
|
259
|
-
if self.status_logs:
|
|
260
|
-
_json['statusLogs'] = self.status_logs
|
|
261
|
-
|
|
262
|
-
return _json
|
|
263
|
-
|
|
264
|
-
############
|
|
265
|
-
# entities #
|
|
266
|
-
############
|
|
267
|
-
@property
|
|
268
|
-
def project(self):
|
|
269
|
-
if self._project is None:
|
|
270
|
-
self._project = self.projects.get(project_id=self.project_id, fetch=None)
|
|
271
|
-
self._repositories = self.set_repositories() # update the repos with the new fetched entity
|
|
272
|
-
assert isinstance(self._project, entities.Project)
|
|
273
|
-
return self._project
|
|
274
|
-
|
|
275
|
-
@property
|
|
276
|
-
def feature_set(self) -> 'entities.FeatureSet':
|
|
277
|
-
if self._feature_set is None:
|
|
278
|
-
filters = entities.Filters(field='modelId',
|
|
279
|
-
values=self.id,
|
|
280
|
-
resource=entities.FiltersResource.FEATURE_SET)
|
|
281
|
-
feature_sets = self.project.feature_sets.list(filters=filters)
|
|
282
|
-
if feature_sets.items_count > 1:
|
|
283
|
-
logger.warning("Found more than one feature set associated with model entity. Returning first result."
|
|
284
|
-
"Set feature_set if other feature set entity is needed.")
|
|
285
|
-
self._feature_set = feature_sets.items[0]
|
|
286
|
-
elif feature_sets.items_count == 1:
|
|
287
|
-
self._feature_set = feature_sets.items[0]
|
|
288
|
-
else:
|
|
289
|
-
self._feature_set = None
|
|
290
|
-
return self._feature_set
|
|
291
|
-
|
|
292
|
-
@feature_set.setter
|
|
293
|
-
def feature_set(self, feature_set: 'entities.FeatureSet'):
|
|
294
|
-
if not isinstance(feature_set, entities.FeatureSet):
|
|
295
|
-
raise ValueError("feature_set must be of type dl.FeatureSet")
|
|
296
|
-
else:
|
|
297
|
-
self._feature_set = feature_set
|
|
298
|
-
|
|
299
|
-
@property
|
|
300
|
-
def package(self):
|
|
301
|
-
if self._package is None:
|
|
302
|
-
try:
|
|
303
|
-
if self.app:
|
|
304
|
-
self._package = self.dpks.get_revisions(dpk_id=self.app['dpkId'], version=self.app['dpkVersion'])
|
|
305
|
-
else:
|
|
306
|
-
self._package = self.packages.get(package_id=self.package_id)
|
|
307
|
-
except Exception as e:
|
|
308
|
-
error = e
|
|
309
|
-
try:
|
|
310
|
-
self._package = self.dpks.get(dpk_id=self.package_id)
|
|
311
|
-
except Exception:
|
|
312
|
-
raise error
|
|
313
|
-
self._repositories = self.set_repositories() # update the repos with the new fetched entity
|
|
314
|
-
assert isinstance(self._package, (entities.Package, entities.Dpk))
|
|
315
|
-
return self._package
|
|
316
|
-
|
|
317
|
-
@property
|
|
318
|
-
def dataset(self):
|
|
319
|
-
if self._dataset is None:
|
|
320
|
-
if self.dataset_id is None:
|
|
321
|
-
raise RuntimeError("Model {!r} has no dataset. Can be used only for inference".format(self.id))
|
|
322
|
-
self._dataset = self.datasets.get(dataset_id=self.dataset_id, fetch=None)
|
|
323
|
-
self._repositories = self.set_repositories() # update the repos with the new fetched entity
|
|
324
|
-
assert isinstance(self._dataset, entities.Dataset)
|
|
325
|
-
return self._dataset
|
|
326
|
-
|
|
327
|
-
@property
|
|
328
|
-
def ontology(self):
|
|
329
|
-
if self._ontology is None:
|
|
330
|
-
if self.ontology_id is None:
|
|
331
|
-
raise RuntimeError("Model {!r} has no ontology.".format(self.id))
|
|
332
|
-
self._ontology = self.ontologies.get(ontology_id=self.ontology_id)
|
|
333
|
-
assert isinstance(self._ontology, entities.Ontology)
|
|
334
|
-
return self._ontology
|
|
335
|
-
|
|
336
|
-
################
|
|
337
|
-
# repositories #
|
|
338
|
-
################
|
|
339
|
-
@_repositories.default
|
|
340
|
-
def set_repositories(self):
|
|
341
|
-
reps = namedtuple('repositories',
|
|
342
|
-
field_names=['projects', 'datasets', 'models', 'packages', 'ontologies', 'artifacts',
|
|
343
|
-
'metrics', 'dpks', 'services'])
|
|
344
|
-
|
|
345
|
-
r = reps(projects=repositories.Projects(client_api=self._client_api),
|
|
346
|
-
datasets=repositories.Datasets(client_api=self._client_api,
|
|
347
|
-
project=self._project),
|
|
348
|
-
models=repositories.Models(client_api=self._client_api,
|
|
349
|
-
project=self._project,
|
|
350
|
-
project_id=self.project_id,
|
|
351
|
-
package=self._package),
|
|
352
|
-
packages=repositories.Packages(client_api=self._client_api,
|
|
353
|
-
project=self._project),
|
|
354
|
-
ontologies=repositories.Ontologies(client_api=self._client_api,
|
|
355
|
-
project=self._project,
|
|
356
|
-
dataset=self._dataset),
|
|
357
|
-
artifacts=repositories.Artifacts(client_api=self._client_api,
|
|
358
|
-
project=self._project,
|
|
359
|
-
project_id=self.project_id,
|
|
360
|
-
model=self),
|
|
361
|
-
metrics=repositories.Metrics(client_api=self._client_api,
|
|
362
|
-
model=self),
|
|
363
|
-
dpks=repositories.Dpks(client_api=self._client_api),
|
|
364
|
-
services=repositories.Services(client_api=self._client_api,
|
|
365
|
-
project=self._project,
|
|
366
|
-
project_id=self.project_id,
|
|
367
|
-
model_id=self.id,
|
|
368
|
-
model=self),
|
|
369
|
-
)
|
|
370
|
-
return r
|
|
371
|
-
|
|
372
|
-
@property
|
|
373
|
-
def platform_url(self):
|
|
374
|
-
return self._client_api._get_resource_url("projects/{}/model/{}".format(self.project_id, self.id))
|
|
375
|
-
|
|
376
|
-
@property
|
|
377
|
-
def projects(self):
|
|
378
|
-
assert isinstance(self._repositories.projects, repositories.Projects)
|
|
379
|
-
return self._repositories.projects
|
|
380
|
-
|
|
381
|
-
@property
|
|
382
|
-
def datasets(self):
|
|
383
|
-
assert isinstance(self._repositories.datasets, repositories.Datasets)
|
|
384
|
-
return self._repositories.datasets
|
|
385
|
-
|
|
386
|
-
@property
|
|
387
|
-
def models(self):
|
|
388
|
-
assert isinstance(self._repositories.models, repositories.Models)
|
|
389
|
-
return self._repositories.models
|
|
390
|
-
|
|
391
|
-
@property
|
|
392
|
-
def packages(self):
|
|
393
|
-
assert isinstance(self._repositories.packages, repositories.Packages)
|
|
394
|
-
return self._repositories.packages
|
|
395
|
-
|
|
396
|
-
@property
|
|
397
|
-
def dpks(self):
|
|
398
|
-
assert isinstance(self._repositories.dpks, repositories.Dpks)
|
|
399
|
-
return self._repositories.dpks
|
|
400
|
-
|
|
401
|
-
@property
|
|
402
|
-
def ontologies(self):
|
|
403
|
-
assert isinstance(self._repositories.ontologies, repositories.Ontologies)
|
|
404
|
-
return self._repositories.ontologies
|
|
405
|
-
|
|
406
|
-
@property
|
|
407
|
-
def artifacts(self):
|
|
408
|
-
assert isinstance(self._repositories.artifacts, repositories.Artifacts)
|
|
409
|
-
return self._repositories.artifacts
|
|
410
|
-
|
|
411
|
-
@property
|
|
412
|
-
def metrics(self):
|
|
413
|
-
assert isinstance(self._repositories.metrics, repositories.Metrics)
|
|
414
|
-
return self._repositories.metrics
|
|
415
|
-
|
|
416
|
-
@property
|
|
417
|
-
def services(self):
|
|
418
|
-
assert isinstance(self._repositories.services, repositories.Services)
|
|
419
|
-
return self._repositories.services
|
|
420
|
-
|
|
421
|
-
@property
|
|
422
|
-
def id_to_label_map(self):
|
|
423
|
-
# default
|
|
424
|
-
if 'id_to_label_map' not in self.configuration:
|
|
425
|
-
if not (self.dataset_id == 'null' or self.dataset_id is None):
|
|
426
|
-
self.labels = [label.tag for label in self.dataset.labels]
|
|
427
|
-
self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
|
|
428
|
-
# use existing
|
|
429
|
-
else:
|
|
430
|
-
self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
|
|
431
|
-
self.configuration['id_to_label_map'].items()}
|
|
432
|
-
return self.configuration['id_to_label_map']
|
|
433
|
-
|
|
434
|
-
@id_to_label_map.setter
|
|
435
|
-
def id_to_label_map(self, mapping: dict):
|
|
436
|
-
self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in mapping.items()}
|
|
437
|
-
|
|
438
|
-
@property
|
|
439
|
-
def label_to_id_map(self):
|
|
440
|
-
if 'label_to_id_map' not in self.configuration:
|
|
441
|
-
self.configuration['label_to_id_map'] = {v: int(k) for k, v in self.id_to_label_map.items()}
|
|
442
|
-
return self.configuration['label_to_id_map']
|
|
443
|
-
|
|
444
|
-
@label_to_id_map.setter
|
|
445
|
-
def label_to_id_map(self, mapping: dict):
|
|
446
|
-
self.configuration['label_to_id_map'] = {v: int(k) for k, v in mapping.items()}
|
|
447
|
-
|
|
448
|
-
###########
|
|
449
|
-
# methods #
|
|
450
|
-
###########
|
|
451
|
-
|
|
452
|
-
def add_subset(self, subset_name: str, subset_filter: entities.Filters):
|
|
453
|
-
"""
|
|
454
|
-
Adds a subset for the model, specifying a subset of the model's dataset that could be used for training or
|
|
455
|
-
validation.
|
|
456
|
-
|
|
457
|
-
:param str subset_name: the name of the subset
|
|
458
|
-
:param dtlpy.entities.Filters subset_filter: the filtering operation that this subset performs in the dataset.
|
|
459
|
-
|
|
460
|
-
**Example**
|
|
461
|
-
|
|
462
|
-
.. code-block:: python
|
|
463
|
-
|
|
464
|
-
model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
|
|
465
|
-
model.metadata['system']['subsets']
|
|
466
|
-
{'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
|
|
467
|
-
|
|
468
|
-
"""
|
|
469
|
-
self.models.add_subset(self, subset_name, subset_filter)
|
|
470
|
-
|
|
471
|
-
def delete_subset(self, subset_name: str):
|
|
472
|
-
"""
|
|
473
|
-
Removes a subset from the model's metadata.
|
|
474
|
-
|
|
475
|
-
:param str subset_name: the name of the subset
|
|
476
|
-
|
|
477
|
-
**Example**
|
|
478
|
-
|
|
479
|
-
.. code-block:: python
|
|
480
|
-
|
|
481
|
-
model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
|
|
482
|
-
model.metadata['system']['subsets']
|
|
483
|
-
{'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
|
|
484
|
-
models.delete_subset(subset_name='train')
|
|
485
|
-
metadata['system']['subsets']
|
|
486
|
-
{}
|
|
487
|
-
|
|
488
|
-
"""
|
|
489
|
-
self.models.delete_subset(self, subset_name)
|
|
490
|
-
|
|
491
|
-
def update(self, system_metadata=False):
|
|
492
|
-
"""
|
|
493
|
-
Update Models changes to platform
|
|
494
|
-
|
|
495
|
-
:param bool system_metadata: bool - True, if you want to change metadata system
|
|
496
|
-
:
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
:param str
|
|
538
|
-
:param str
|
|
539
|
-
:param
|
|
540
|
-
:param
|
|
541
|
-
:param str
|
|
542
|
-
:param
|
|
543
|
-
:param
|
|
544
|
-
:param
|
|
545
|
-
:param
|
|
546
|
-
|
|
547
|
-
:
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
:
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
:param
|
|
657
|
-
:param
|
|
658
|
-
:param
|
|
659
|
-
:param str
|
|
660
|
-
:param str
|
|
661
|
-
:param
|
|
662
|
-
:param str
|
|
663
|
-
:param
|
|
664
|
-
:param
|
|
665
|
-
:param
|
|
666
|
-
:param
|
|
667
|
-
:
|
|
668
|
-
:
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
1
|
+
from collections import namedtuple
|
|
2
|
+
from enum import Enum
|
|
3
|
+
import traceback
|
|
4
|
+
import logging
|
|
5
|
+
import attr
|
|
6
|
+
from .. import repositories, entities
|
|
7
|
+
from ..services.api_client import ApiClient
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(name='dtlpy')
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class DatasetSubsetType(str, Enum):
    """String-valued enum of the subsets a dataset can be split into."""
    TRAIN = 'train'
    VALIDATION = 'validation'
    TEST = 'test'
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ModelStatus(str, Enum):
    """String-valued enum of the lifecycle statuses a model can be in."""
    # FIX: the original definition had a trailing comma after each member
    # (e.g. ``CREATED = "created",``), which assigns a 1-tuple. Enum unpacks a
    # tuple value as constructor args for the ``str`` mixin, so the observable
    # values were unchanged, but the commas are misleading and fragile (adding
    # a non-str mixin or comparing raw class attributes would break). Removed.
    CREATED = "created"
    PRE_TRAINED = "pre-trained"
    PENDING = "pending"
    TRAINING = "training"
    TRAINED = "trained"
    DEPLOYED = "deployed"
    FAILED = "failed"
    CLONING = "cloning"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class PlotSample:
    """A single (x, y) metric sample belonging to a named figure and legend line."""

    def __init__(self, figure, legend, x, y):
        """
        Create a single metric sample for Model

        :param figure: figure name identifier
        :param legend: line name identifier
        :param x: x value for the current sample
        :param y: y value for the current sample
        """
        self.figure = figure
        self.legend = legend
        self.x = x
        self.y = y

    def to_json(self) -> dict:
        """Return the platform json representation of this sample."""
        return {
            'figure': self.figure,
            'legend': self.legend,
            'data': {'x': self.x, 'y': self.y},
        }
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# class MatrixSample:
|
|
55
|
+
# def __init__(self, figure, legend, x, y):
|
|
56
|
+
# """
|
|
57
|
+
# Create a single metric sample for Model
|
|
58
|
+
#
|
|
59
|
+
# :param figure: figure name identifier
|
|
60
|
+
# :param legend: line name identifier
|
|
61
|
+
# :param x: x value for the current sample
|
|
62
|
+
# :param y: y value for the current sample
|
|
63
|
+
# """
|
|
64
|
+
# self.figure = figure
|
|
65
|
+
# self.legend = legend
|
|
66
|
+
# self.x = x
|
|
67
|
+
# self.y = y
|
|
68
|
+
#
|
|
69
|
+
# def to_json(self) -> dict:
|
|
70
|
+
# _json = {'figure': self.figure,
|
|
71
|
+
# 'legend': self.legend,
|
|
72
|
+
# 'data': {'x': self.x,
|
|
73
|
+
# 'y': self.y}}
|
|
74
|
+
# return _json
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@attr.s
|
|
78
|
+
class Model(entities.BaseEntity):
|
|
79
|
+
"""
|
|
80
|
+
Model object
|
|
81
|
+
"""
|
|
82
|
+
# platform
|
|
83
|
+
id = attr.ib()
|
|
84
|
+
creator = attr.ib()
|
|
85
|
+
created_at = attr.ib()
|
|
86
|
+
updated_at = attr.ib(repr=False)
|
|
87
|
+
model_artifacts = attr.ib()
|
|
88
|
+
name = attr.ib()
|
|
89
|
+
description = attr.ib()
|
|
90
|
+
ontology_id = attr.ib(repr=False)
|
|
91
|
+
labels = attr.ib()
|
|
92
|
+
status = attr.ib()
|
|
93
|
+
tags = attr.ib()
|
|
94
|
+
configuration = attr.ib()
|
|
95
|
+
metadata = attr.ib()
|
|
96
|
+
input_type = attr.ib()
|
|
97
|
+
output_type = attr.ib()
|
|
98
|
+
module_name = attr.ib()
|
|
99
|
+
|
|
100
|
+
url = attr.ib()
|
|
101
|
+
scope = attr.ib()
|
|
102
|
+
version = attr.ib()
|
|
103
|
+
context = attr.ib()
|
|
104
|
+
status_logs = attr.ib()
|
|
105
|
+
|
|
106
|
+
# name change
|
|
107
|
+
package_id = attr.ib(repr=False)
|
|
108
|
+
project_id = attr.ib()
|
|
109
|
+
dataset_id = attr.ib(repr=False)
|
|
110
|
+
|
|
111
|
+
# sdk
|
|
112
|
+
_project = attr.ib(repr=False)
|
|
113
|
+
_package = attr.ib(repr=False)
|
|
114
|
+
_dataset = attr.ib(repr=False)
|
|
115
|
+
_feature_set = attr.ib(repr=False)
|
|
116
|
+
_client_api = attr.ib(type=ApiClient, repr=False)
|
|
117
|
+
_repositories = attr.ib(repr=False)
|
|
118
|
+
_ontology = attr.ib(repr=False, default=None)
|
|
119
|
+
updated_by = attr.ib(default=None)
|
|
120
|
+
app = attr.ib(default=None)
|
|
121
|
+
|
|
122
|
+
@staticmethod
|
|
123
|
+
def _protected_from_json(_json, client_api, project=None, package=None, is_fetched=True):
|
|
124
|
+
"""
|
|
125
|
+
Same as from_json but with try-except to catch if error
|
|
126
|
+
|
|
127
|
+
:param _json: platform representation of Model
|
|
128
|
+
:param client_api: ApiClient entity
|
|
129
|
+
:param project: project that owns the model
|
|
130
|
+
:param package: package entity of the model
|
|
131
|
+
:param is_fetched: is Entity fetched from Platform
|
|
132
|
+
:return: Model entity
|
|
133
|
+
"""
|
|
134
|
+
try:
|
|
135
|
+
model = Model.from_json(_json=_json,
|
|
136
|
+
client_api=client_api,
|
|
137
|
+
project=project,
|
|
138
|
+
package=package,
|
|
139
|
+
is_fetched=is_fetched)
|
|
140
|
+
status = True
|
|
141
|
+
except Exception:
|
|
142
|
+
model = traceback.format_exc()
|
|
143
|
+
status = False
|
|
144
|
+
return status, model
|
|
145
|
+
|
|
146
|
+
    @classmethod
    def from_json(cls, _json, client_api, project=None, package=None, is_fetched=True):
        """
        Turn platform representation of model into a model entity

        :param _json: platform representation of model
        :param client_api: ApiClient entity
        :param project: project that owns the model
        :param package: package entity of the model
        :param is_fetched: is Entity fetched from Platform
        :return: Model entity
        """
        # Discard the given project/package when they do not match the json
        # context, so the entity is never bound to the wrong parent.
        if project is not None:
            if project.id != _json.get('context', {}).get('project', None):
                logger.warning("Model's project is different then the input project")
                project = None

        if package is not None:
            if package.id != _json.get('packageId', None):
                logger.warning("Model's package is different then the input package")
                package = None

        # each artifact entry in the json becomes an Artifact entity
        model_artifacts = [entities.Artifact.from_json(_json=artifact,
                                                       client_api=client_api,
                                                       project=project)
                           for artifact in _json.get('artifacts', list())]

        inst = cls(
            configuration=_json.get('configuration', None),
            description=_json.get('description', None),
            status=_json.get('status', None),
            tags=_json.get('tags', None),
            metadata=_json.get('metadata', dict()),
            project_id=_json.get('context', {}).get('project', None),
            dataset_id=_json.get('datasetId', None),
            package_id=_json.get('packageId', None),
            model_artifacts=model_artifacts,
            labels=_json.get('labels', None),
            # NOTE(review): key is snake_case 'ontology_id' here while most
            # platform keys are camelCase — confirm against the API schema.
            ontology_id=_json.get('ontology_id', None),
            created_at=_json.get('createdAt', None),
            updated_at=_json.get('updatedAt', None),
            creator=_json.get('context', {}).get('creator', None),
            client_api=client_api,
            name=_json.get('name', None),
            project=project,
            package=package,
            dataset=None,
            feature_set=None,
            id=_json.get('id', None),
            url=_json.get('url', None),
            scope=_json.get('scope', entities.EntityScopeLevel.PROJECT),
            version=_json.get('version', '1.0.0'),
            context=_json.get('context', {}),
            input_type=_json.get('inputType', None),
            output_type=_json.get('outputType', None),
            module_name=_json.get('moduleName', None),
            updated_by=_json.get('updatedBy', None),
            app=_json.get('app', None),
            status_logs=_json.get('statusLogs', []),
        )
        inst.is_fetched = is_fetched
        return inst
|
|
208
|
+
|
|
209
|
+
    def to_json(self):
        """
        Get the dict of Model

        :return: platform json of model
        :rtype: dict
        """
        # Serialize attr fields, excluding sdk-only fields and those that need
        # camelCase renaming or custom serialization (re-added below).
        _json = attr.asdict(self,
                            filter=attr.filters.exclude(attr.fields(Model)._project,
                                                        attr.fields(Model)._package,
                                                        attr.fields(Model)._dataset,
                                                        attr.fields(Model)._ontology,
                                                        attr.fields(Model)._repositories,
                                                        attr.fields(Model)._feature_set,
                                                        attr.fields(Model)._client_api,
                                                        attr.fields(Model).package_id,
                                                        attr.fields(Model).project_id,
                                                        attr.fields(Model).dataset_id,
                                                        attr.fields(Model).ontology_id,
                                                        attr.fields(Model).model_artifacts,
                                                        attr.fields(Model).created_at,
                                                        attr.fields(Model).updated_at,
                                                        attr.fields(Model).input_type,
                                                        attr.fields(Model).output_type,
                                                        attr.fields(Model).updated_by,
                                                        attr.fields(Model).app,
                                                        attr.fields(Model).status_logs
                                                        ))
        # re-add excluded fields under their camelCase platform keys
        _json['packageId'] = self.package_id
        _json['datasetId'] = self.dataset_id
        _json['createdAt'] = self.created_at
        _json['updatedAt'] = self.updated_at
        _json['inputType'] = self.input_type
        _json['outputType'] = self.output_type
        _json['moduleName'] = self.module_name

        # file/dir artifacts are serialized as item references; any other
        # artifact serializes itself
        model_artifacts = list()
        for artifact in self.model_artifacts:
            if artifact.type in ['file', 'dir']:
                artifact = {'type': 'item',
                            'itemId': artifact.id}
            else:
                artifact = artifact.to_json(as_artifact=True)
            model_artifacts.append(artifact)
        _json['artifacts'] = model_artifacts

        # optional fields are emitted only when truthy
        if self.updated_by:
            _json['updatedBy'] = self.updated_by
        if self.app:
            _json['app'] = self.app
        if self.status_logs:
            _json['statusLogs'] = self.status_logs

        return _json
|
|
263
|
+
|
|
264
|
+
############
|
|
265
|
+
# entities #
|
|
266
|
+
############
|
|
267
|
+
@property
|
|
268
|
+
def project(self):
|
|
269
|
+
if self._project is None:
|
|
270
|
+
self._project = self.projects.get(project_id=self.project_id, fetch=None)
|
|
271
|
+
self._repositories = self.set_repositories() # update the repos with the new fetched entity
|
|
272
|
+
assert isinstance(self._project, entities.Project)
|
|
273
|
+
return self._project
|
|
274
|
+
|
|
275
|
+
    @property
    def feature_set(self) -> 'entities.FeatureSet':
        """Lazily look up (and cache) the FeatureSet whose 'modelId' points at this model; None if there is none."""
        if self._feature_set is None:
            filters = entities.Filters(field='modelId',
                                       values=self.id,
                                       resource=entities.FiltersResource.FEATURE_SET)
            feature_sets = self.project.feature_sets.list(filters=filters)
            if feature_sets.items_count > 1:
                # NOTE(review): the two literals are concatenated without a
                # separating space at runtime ("...first result.Set ...") —
                # kept byte-identical here.
                logger.warning("Found more than one feature set associated with model entity. Returning first result."
                               "Set feature_set if other feature set entity is needed.")
                self._feature_set = feature_sets.items[0]
            elif feature_sets.items_count == 1:
                self._feature_set = feature_sets.items[0]
            else:
                # no match: the lookup will be retried on the next access
                self._feature_set = None
        return self._feature_set
|
|
291
|
+
|
|
292
|
+
@feature_set.setter
|
|
293
|
+
def feature_set(self, feature_set: 'entities.FeatureSet'):
|
|
294
|
+
if not isinstance(feature_set, entities.FeatureSet):
|
|
295
|
+
raise ValueError("feature_set must be of type dl.FeatureSet")
|
|
296
|
+
else:
|
|
297
|
+
self._feature_set = feature_set
|
|
298
|
+
|
|
299
|
+
    @property
    def package(self):
        """
        Lazily fetch and cache the package (or Dpk) this model was created from.

        When the model belongs to an app, the dpk revision referenced by the
        app is fetched; otherwise the package is fetched by ``package_id``,
        falling back to resolving ``package_id`` as a dpk id if that fails.
        """
        if self._package is None:
            try:
                if self.app:
                    self._package = self.dpks.get_revisions(dpk_id=self.app['dpkId'], version=self.app['dpkVersion'])
                else:
                    self._package = self.packages.get(package_id=self.package_id)
            except Exception as e:
                error = e
                try:
                    # fallback: package_id may actually reference a dpk
                    self._package = self.dpks.get(dpk_id=self.package_id)
                except Exception:
                    # re-raise the original error, not the fallback's
                    raise error
            self._repositories = self.set_repositories()  # update the repos with the new fetched entity
        assert isinstance(self._package, (entities.Package, entities.Dpk))
        return self._package
|
|
316
|
+
|
|
317
|
+
@property
|
|
318
|
+
def dataset(self):
|
|
319
|
+
if self._dataset is None:
|
|
320
|
+
if self.dataset_id is None:
|
|
321
|
+
raise RuntimeError("Model {!r} has no dataset. Can be used only for inference".format(self.id))
|
|
322
|
+
self._dataset = self.datasets.get(dataset_id=self.dataset_id, fetch=None)
|
|
323
|
+
self._repositories = self.set_repositories() # update the repos with the new fetched entity
|
|
324
|
+
assert isinstance(self._dataset, entities.Dataset)
|
|
325
|
+
return self._dataset
|
|
326
|
+
|
|
327
|
+
@property
|
|
328
|
+
def ontology(self):
|
|
329
|
+
if self._ontology is None:
|
|
330
|
+
if self.ontology_id is None:
|
|
331
|
+
raise RuntimeError("Model {!r} has no ontology.".format(self.id))
|
|
332
|
+
self._ontology = self.ontologies.get(ontology_id=self.ontology_id)
|
|
333
|
+
assert isinstance(self._ontology, entities.Ontology)
|
|
334
|
+
return self._ontology
|
|
335
|
+
|
|
336
|
+
################
|
|
337
|
+
# repositories #
|
|
338
|
+
################
|
|
339
|
+
    @_repositories.default
    def set_repositories(self):
        """Default factory for ``_repositories``: a namedtuple of repository clients scoped to this model."""
        reps = namedtuple('repositories',
                          field_names=['projects', 'datasets', 'models', 'packages', 'ontologies', 'artifacts',
                                       'metrics', 'dpks', 'services'])

        # repositories receive whatever parent entities are already cached
        # (self._project / self._package / self._dataset may still be None)
        r = reps(projects=repositories.Projects(client_api=self._client_api),
                 datasets=repositories.Datasets(client_api=self._client_api,
                                                project=self._project),
                 models=repositories.Models(client_api=self._client_api,
                                            project=self._project,
                                            project_id=self.project_id,
                                            package=self._package),
                 packages=repositories.Packages(client_api=self._client_api,
                                                project=self._project),
                 ontologies=repositories.Ontologies(client_api=self._client_api,
                                                    project=self._project,
                                                    dataset=self._dataset),
                 artifacts=repositories.Artifacts(client_api=self._client_api,
                                                  project=self._project,
                                                  project_id=self.project_id,
                                                  model=self),
                 metrics=repositories.Metrics(client_api=self._client_api,
                                              model=self),
                 dpks=repositories.Dpks(client_api=self._client_api),
                 services=repositories.Services(client_api=self._client_api,
                                                project=self._project,
                                                project_id=self.project_id,
                                                model_id=self.id,
                                                model=self),
                 )
        return r
|
|
371
|
+
|
|
372
|
+
@property
|
|
373
|
+
def platform_url(self):
|
|
374
|
+
return self._client_api._get_resource_url("projects/{}/model/{}".format(self.project_id, self.id))
|
|
375
|
+
|
|
376
|
+
@property
|
|
377
|
+
def projects(self):
|
|
378
|
+
assert isinstance(self._repositories.projects, repositories.Projects)
|
|
379
|
+
return self._repositories.projects
|
|
380
|
+
|
|
381
|
+
@property
|
|
382
|
+
def datasets(self):
|
|
383
|
+
assert isinstance(self._repositories.datasets, repositories.Datasets)
|
|
384
|
+
return self._repositories.datasets
|
|
385
|
+
|
|
386
|
+
@property
|
|
387
|
+
def models(self):
|
|
388
|
+
assert isinstance(self._repositories.models, repositories.Models)
|
|
389
|
+
return self._repositories.models
|
|
390
|
+
|
|
391
|
+
@property
|
|
392
|
+
def packages(self):
|
|
393
|
+
assert isinstance(self._repositories.packages, repositories.Packages)
|
|
394
|
+
return self._repositories.packages
|
|
395
|
+
|
|
396
|
+
@property
|
|
397
|
+
def dpks(self):
|
|
398
|
+
assert isinstance(self._repositories.dpks, repositories.Dpks)
|
|
399
|
+
return self._repositories.dpks
|
|
400
|
+
|
|
401
|
+
@property
|
|
402
|
+
def ontologies(self):
|
|
403
|
+
assert isinstance(self._repositories.ontologies, repositories.Ontologies)
|
|
404
|
+
return self._repositories.ontologies
|
|
405
|
+
|
|
406
|
+
@property
|
|
407
|
+
def artifacts(self):
|
|
408
|
+
assert isinstance(self._repositories.artifacts, repositories.Artifacts)
|
|
409
|
+
return self._repositories.artifacts
|
|
410
|
+
|
|
411
|
+
@property
|
|
412
|
+
def metrics(self):
|
|
413
|
+
assert isinstance(self._repositories.metrics, repositories.Metrics)
|
|
414
|
+
return self._repositories.metrics
|
|
415
|
+
|
|
416
|
+
@property
|
|
417
|
+
def services(self):
|
|
418
|
+
assert isinstance(self._repositories.services, repositories.Services)
|
|
419
|
+
return self._repositories.services
|
|
420
|
+
|
|
421
|
+
    @property
    def id_to_label_map(self):
        """
        Mapping from integer label id to label name, stored in ``configuration``.

        If missing, it is built from the attached dataset's labels; if present,
        it is normalized so keys are ints (json may deliver them as strings).
        """
        # default
        if 'id_to_label_map' not in self.configuration:
            # NOTE(review): dataset_id may hold the literal string 'null' —
            # presumably a platform artifact; treated the same as None here.
            if not (self.dataset_id == 'null' or self.dataset_id is None):
                self.labels = [label.tag for label in self.dataset.labels]
                self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
            # NOTE(review): when no dataset is attached the key is never set,
            # so the return below raises KeyError — confirm this is intended.
        # use existing
        else:
            self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
                                                     self.configuration['id_to_label_map'].items()}
        return self.configuration['id_to_label_map']
|
|
433
|
+
|
|
434
|
+
@id_to_label_map.setter
|
|
435
|
+
def id_to_label_map(self, mapping: dict):
|
|
436
|
+
self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in mapping.items()}
|
|
437
|
+
|
|
438
|
+
@property
|
|
439
|
+
def label_to_id_map(self):
|
|
440
|
+
if 'label_to_id_map' not in self.configuration:
|
|
441
|
+
self.configuration['label_to_id_map'] = {v: int(k) for k, v in self.id_to_label_map.items()}
|
|
442
|
+
return self.configuration['label_to_id_map']
|
|
443
|
+
|
|
444
|
+
@label_to_id_map.setter
|
|
445
|
+
def label_to_id_map(self, mapping: dict):
|
|
446
|
+
self.configuration['label_to_id_map'] = {v: int(k) for k, v in mapping.items()}
|
|
447
|
+
|
|
448
|
+
###########
|
|
449
|
+
# methods #
|
|
450
|
+
###########
|
|
451
|
+
|
|
452
|
+
def add_subset(self, subset_name: str, subset_filter: entities.Filters):
|
|
453
|
+
"""
|
|
454
|
+
Adds a subset for the model, specifying a subset of the model's dataset that could be used for training or
|
|
455
|
+
validation.
|
|
456
|
+
|
|
457
|
+
:param str subset_name: the name of the subset
|
|
458
|
+
:param dtlpy.entities.Filters subset_filter: the filtering operation that this subset performs in the dataset.
|
|
459
|
+
|
|
460
|
+
**Example**
|
|
461
|
+
|
|
462
|
+
.. code-block:: python
|
|
463
|
+
|
|
464
|
+
model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
|
|
465
|
+
model.metadata['system']['subsets']
|
|
466
|
+
{'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
|
|
467
|
+
|
|
468
|
+
"""
|
|
469
|
+
self.models.add_subset(self, subset_name, subset_filter)
|
|
470
|
+
|
|
471
|
+
def delete_subset(self, subset_name: str):
|
|
472
|
+
"""
|
|
473
|
+
Removes a subset from the model's metadata.
|
|
474
|
+
|
|
475
|
+
:param str subset_name: the name of the subset
|
|
476
|
+
|
|
477
|
+
**Example**
|
|
478
|
+
|
|
479
|
+
.. code-block:: python
|
|
480
|
+
|
|
481
|
+
model.add_subset(subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
|
|
482
|
+
model.metadata['system']['subsets']
|
|
483
|
+
{'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
|
|
484
|
+
models.delete_subset(subset_name='train')
|
|
485
|
+
metadata['system']['subsets']
|
|
486
|
+
{}
|
|
487
|
+
|
|
488
|
+
"""
|
|
489
|
+
self.models.delete_subset(self, subset_name)
|
|
490
|
+
|
|
491
|
+
def update(self, system_metadata=False, reload_services=True):
|
|
492
|
+
"""
|
|
493
|
+
Update Models changes to platform
|
|
494
|
+
|
|
495
|
+
:param bool system_metadata: bool - True, if you want to change metadata system
|
|
496
|
+
:param bool reload_services: bool - True, if you want to update the services with the new model
|
|
497
|
+
:return: Models entity
|
|
498
|
+
"""
|
|
499
|
+
return self.models.update(model=self,
|
|
500
|
+
system_metadata=system_metadata,
|
|
501
|
+
reload_services=reload_services
|
|
502
|
+
)
|
|
503
|
+
|
|
504
|
+
def open_in_web(self):
|
|
505
|
+
"""
|
|
506
|
+
Open the model in web platform
|
|
507
|
+
|
|
508
|
+
:return:
|
|
509
|
+
"""
|
|
510
|
+
self._client_api._open_in_web(url=self.platform_url)
|
|
511
|
+
|
|
512
|
+
def delete(self):
|
|
513
|
+
"""
|
|
514
|
+
Delete Model object
|
|
515
|
+
|
|
516
|
+
:return: True
|
|
517
|
+
"""
|
|
518
|
+
return self.models.delete(model=self)
|
|
519
|
+
|
|
520
|
+
    def clone(self,
              model_name: str,
              dataset: entities.Dataset = None,
              configuration: dict = None,
              status=None,
              scope=None,
              project_id: str = None,
              labels: list = None,
              description: str = None,
              tags: list = None,
              train_filter: entities.Filters = None,
              validation_filter: entities.Filters = None,
              wait=True
              ):
        """
        Clones and creates a new model out of existing one

        :param str model_name: `str` new model name
        :param str dataset: dataset object for the cloned model
        :param dict configuration: `dict` (optional) if passed replaces the current configuration
        :param str status: `str` (optional) set the new status
        :param str scope: `str` (optional) set the new scope. default is "project"
        :param str project_id: `str` specify the project id to create the new model on (if other than the source model)
        :param list labels: `list` of `str` - label of the model
        :param str description: `str` description of the new model
        :param list tags: `list` of `str` - tags of the model
        :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
        :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
        :param bool wait: `bool` wait for the model to be ready before returning

        :return: dl.Model which is a clone version of the existing model
        """
        # pure pass-through to the models repository
        return self.models.clone(from_model=self,
                                 model_name=model_name,
                                 project_id=project_id,
                                 dataset=dataset,
                                 scope=scope,
                                 status=status,
                                 configuration=configuration,
                                 labels=labels,
                                 description=description,
                                 tags=tags,
                                 train_filter=train_filter,
                                 validation_filter=validation_filter,
                                 wait=wait
                                 )
|
|
566
|
+
|
|
567
|
+
def train(self, service_config=None):
|
|
568
|
+
"""
|
|
569
|
+
Train the model in the cloud. This will create a service and will run the adapter's train function as an execution
|
|
570
|
+
|
|
571
|
+
:param dict service_config : Service object as dict. Contains the spec of the default service to create.
|
|
572
|
+
:return:
|
|
573
|
+
"""
|
|
574
|
+
return self.models.train(model_id=self.id, service_config=service_config)
|
|
575
|
+
|
|
576
|
+
def evaluate(self, dataset_id, filters: entities.Filters = None, service_config=None):
|
|
577
|
+
"""
|
|
578
|
+
Evaluate Model, provide data to evaluate the model on You can also provide specific config for the deployed service
|
|
579
|
+
|
|
580
|
+
:param dict service_config : Service object as dict. Contains the spec of the default service to create.
|
|
581
|
+
:param str dataset_id: ID of the dataset to evaluate
|
|
582
|
+
:param entities.Filters filters: dl.Filter entity to run the predictions on
|
|
583
|
+
:return:
|
|
584
|
+
"""
|
|
585
|
+
return self.models.evaluate(model_id=self.id,
|
|
586
|
+
dataset_id=dataset_id,
|
|
587
|
+
filters=filters,
|
|
588
|
+
service_config=service_config)
|
|
589
|
+
|
|
590
|
+
def predict(self, item_ids=None, dataset_id=None):
|
|
591
|
+
"""
|
|
592
|
+
Run model prediction with items
|
|
593
|
+
|
|
594
|
+
:param item_ids: a list of item id to run the prediction.
|
|
595
|
+
:param dataset_id: dataset id to run the prediction on
|
|
596
|
+
:return:
|
|
597
|
+
"""
|
|
598
|
+
return self.models.predict(model=self, item_ids=item_ids, dataset_id=dataset_id)
|
|
599
|
+
|
|
600
|
+
def embed(self, item_ids):
|
|
601
|
+
"""
|
|
602
|
+
Run model embed with items
|
|
603
|
+
|
|
604
|
+
:param item_ids: a list of item id to run the embed.
|
|
605
|
+
:return:
|
|
606
|
+
"""
|
|
607
|
+
return self.models.embed(model=self, item_ids=item_ids)
|
|
608
|
+
|
|
609
|
+
def embed_datasets(self, dataset_ids, attach_trigger=False):
|
|
610
|
+
"""
|
|
611
|
+
Run model embed with datasets
|
|
612
|
+
|
|
613
|
+
:param dataset_ids: a list of dataset id to run the embed.
|
|
614
|
+
:param attach_trigger: bool - True, if you want to activate the trigger
|
|
615
|
+
:return:
|
|
616
|
+
"""
|
|
617
|
+
return self.models.embed_datasets(model=self, dataset_ids=dataset_ids, attach_trigger=attach_trigger)
|
|
618
|
+
|
|
619
|
+
def deploy(self, service_config=None) -> entities.Service:
|
|
620
|
+
"""
|
|
621
|
+
Deploy a trained model. This will create a service that will execute predictions
|
|
622
|
+
|
|
623
|
+
:param dict service_config : Service object as dict. Contains the spec of the default service to create.
|
|
624
|
+
|
|
625
|
+
:return: dl.Service: The deployed service
|
|
626
|
+
"""
|
|
627
|
+
return self.models.deploy(model_id=self.id, service_config=service_config)
|
|
628
|
+
|
|
629
|
+
def wait_for_model_ready(self):
|
|
630
|
+
"""
|
|
631
|
+
Wait for model to be ready
|
|
632
|
+
|
|
633
|
+
:return:
|
|
634
|
+
"""
|
|
635
|
+
return self.models.wait_for_model_ready(model=self)
|
|
636
|
+
|
|
637
|
+
    def log(self,
            service=None,
            size=None,
            checkpoint=None,
            start=None,
            end=None,
            follow=False,
            text=None,
            execution_id=None,
            function_name=None,
            replica_id=None,
            system=False,
            view=True,
            until_completed=True,
            model_operation: str = None,
            ):
        """
        Get service logs

        :param service: service object
        :param int size: size
        :param dict checkpoint: the information from the lst point checked in the service
        :param str start: iso format time
        :param str end: iso format time
        :param bool follow: if true, keep stream future logs
        :param str text: text
        :param str execution_id: execution id
        :param str function_name: function name
        :param str replica_id: replica id
        :param bool system: system
        :param bool view: if true, print out all the logs
        :param bool until_completed: wait until completed
        :param str model_operation: model operation action
        :return: ServiceLog entity
        :rtype: ServiceLog

        **Example**:

        .. code-block:: python

            service_log = service.log()
        """
        # pass-through to the services repository, scoped to this model/project
        return self.services.log(service=service,
                                 size=size,
                                 checkpoint=checkpoint,
                                 start=start,
                                 end=end,
                                 follow=follow,
                                 execution_id=execution_id,
                                 function_name=function_name,
                                 replica_id=replica_id,
                                 system=system,
                                 text=text,
                                 view=view,
                                 until_completed=until_completed,
                                 model_id=self.id,
                                 model_operation=model_operation,
                                 project_id=self.project_id)
|