dtlpy-1.115.44-py3-none-any.whl → dtlpy-1.116.6-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1504 -1504
  167. dtlpy/repositories/downloader.py +976 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/model.py CHANGED
@@ -1,684 +1,684 @@
1
- from collections import namedtuple
2
- from enum import Enum
3
- import traceback
4
- import logging
5
- import attr
6
- from .. import repositories, entities
7
- from ..services.api_client import ApiClient
8
-
9
- logger = logging.getLogger(name='dtlpy')
10
-
11
-
12
- class DatasetSubsetType(str, Enum):
13
- """Available types for dataset subsets"""
14
- TRAIN = 'train'
15
- VALIDATION = 'validation'
16
- TEST = 'test'
17
-
18
-
19
- class ModelStatus(str, Enum):
20
- """Available types for model status"""
21
- CREATED = "created",
22
- PRE_TRAINED = "pre-trained",
23
- PENDING = "pending",
24
- TRAINING = "training",
25
- TRAINED = "trained",
26
- DEPLOYED = "deployed",
27
- FAILED = "failed",
28
- CLONING = "cloning"
29
-
30
-
31
- class PlotSample:
32
- def __init__(self, figure, legend, x, y):
33
- """
34
- Create a single metric sample for Model
35
-
36
- :param figure: figure name identifier
37
- :param legend: line name identifier
38
- :param x: x value for the current sample
39
- :param y: y value for the current sample
40
- """
41
- self.figure = figure
42
- self.legend = legend
43
- self.x = x
44
- self.y = y
45
-
46
- def to_json(self) -> dict:
47
- _json = {'figure': self.figure,
48
- 'legend': self.legend,
49
- 'data': {'x': self.x,
50
- 'y': self.y}}
51
- return _json
52
-
53
-
54
- # class MatrixSample:
55
- # def __init__(self, figure, legend, x, y):
56
- # """
57
- # Create a single metric sample for Model
58
- #
59
- # :param figure: figure name identifier
60
- # :param legend: line name identifier
61
- # :param x: x value for the current sample
62
- # :param y: y value for the current sample
63
- # """
64
- # self.figure = figure
65
- # self.legend = legend
66
- # self.x = x
67
- # self.y = y
68
- #
69
- # def to_json(self) -> dict:
70
- # _json = {'figure': self.figure,
71
- # 'legend': self.legend,
72
- # 'data': {'x': self.x,
73
- # 'y': self.y}}
74
- # return _json
75
-
76
-
77
- @attr.s
78
- class Model(entities.BaseEntity):
79
- """
80
- Model object
81
- """
82
- # platform
83
- id = attr.ib()
84
- creator = attr.ib()
85
- created_at = attr.ib()
86
- updated_at = attr.ib(repr=False)
87
- model_artifacts = attr.ib()
88
- name = attr.ib()
89
- description = attr.ib()
90
- ontology_id = attr.ib(repr=False)
91
- labels = attr.ib()
92
- status = attr.ib()
93
- tags = attr.ib()
94
- configuration = attr.ib()
95
- metadata = attr.ib()
96
- input_type = attr.ib()
97
- output_type = attr.ib()
98
- module_name = attr.ib()
99
-
100
- url = attr.ib()
101
- scope = attr.ib()
102
- version = attr.ib()
103
- context = attr.ib()
104
- status_logs = attr.ib()
105
-
106
- # name change
107
- package_id = attr.ib(repr=False)
108
- project_id = attr.ib()
109
- dataset_id = attr.ib(repr=False)
110
-
111
- # sdk
112
- _project = attr.ib(repr=False)
113
- _package = attr.ib(repr=False)
114
- _dataset = attr.ib(repr=False)
115
- _feature_set = attr.ib(repr=False)
116
- _client_api = attr.ib(type=ApiClient, repr=False)
117
- _repositories = attr.ib(repr=False)
118
- _ontology = attr.ib(repr=False, default=None)
119
- updated_by = attr.ib(default=None)
120
- app = attr.ib(default=None)
121
-
122
- @staticmethod
123
- def _protected_from_json(_json, client_api, project=None, package=None, is_fetched=True):
124
- """
125
- Same as from_json but with try-except to catch if error
126
-
127
- :param _json: platform representation of Model
128
- :param client_api: ApiClient entity
129
- :param project: project that owns the model
130
- :param package: package entity of the model
131
- :param is_fetched: is Entity fetched from Platform
132
- :return: Model entity
133
- """
134
- try:
135
- model = Model.from_json(_json=_json,
136
- client_api=client_api,
137
- project=project,
138
- package=package,
139
- is_fetched=is_fetched)
140
- status = True
141
- except Exception:
142
- model = traceback.format_exc()
143
- status = False
144
- return status, model
145
-
146
- @classmethod
147
- def from_json(cls, _json, client_api, project=None, package=None, is_fetched=True):
148
- """
149
- Turn platform representation of model into a model entity
150
-
151
- :param _json: platform representation of model
152
- :param client_api: ApiClient entity
153
- :param project: project that owns the model
154
- :param package: package entity of the model
155
- :param is_fetched: is Entity fetched from Platform
156
- :return: Model entity
157
- """
158
- if project is not None:
159
- if project.id != _json.get('context', {}).get('project', None):
160
- logger.warning("Model's project is different then the input project")
161
- project = None
162
-
163
- if package is not None:
164
- if package.id != _json.get('packageId', None):
165
- logger.warning("Model's package is different then the input package")
166
- package = None
167
-
168
- model_artifacts = [entities.Artifact.from_json(_json=artifact,
169
- client_api=client_api,
170
- project=project)
171
- for artifact in _json.get('artifacts', list())]
172
-
173
- inst = cls(
174
- configuration=_json.get('configuration', None),
175
- description=_json.get('description', None),
176
- status=_json.get('status', None),
177
- tags=_json.get('tags', None),
178
- metadata=_json.get('metadata', dict()),
179
- project_id=_json.get('context', {}).get('project', None),
180
- dataset_id=_json.get('datasetId', None),
181
- package_id=_json.get('packageId', None),
182
- model_artifacts=model_artifacts,
183
- labels=_json.get('labels', None),
184
- ontology_id=_json.get('ontology_id', None),
185
- created_at=_json.get('createdAt', None),
186
- updated_at=_json.get('updatedAt', None),
187
- creator=_json.get('context', {}).get('creator', None),
188
- client_api=client_api,
189
- name=_json.get('name', None),
190
- project=project,
191
- package=package,
192
- dataset=None,
193
- feature_set=None,
194
- id=_json.get('id', None),
195
- url=_json.get('url', None),
196
- scope=_json.get('scope', entities.EntityScopeLevel.PROJECT),
197
- version=_json.get('version', '1.0.0'),
198
- context=_json.get('context', {}),
199
- input_type=_json.get('inputType', None),
200
- output_type=_json.get('outputType', None),
201
- module_name=_json.get('moduleName', None),
202
- updated_by=_json.get('updatedBy', None),
203
- app=_json.get('app', None),
204
- status_logs=_json.get('statusLogs', []),
205
- )
206
- inst.is_fetched = is_fetched
207
- return inst
208
-
209
- def to_json(self):
210
- """
211
- Get the dict of Model
212
-
213
- :return: platform json of model
214
- :rtype: dict
215
- """
216
- _json = attr.asdict(self,
217
- filter=attr.filters.exclude(attr.fields(Model)._project,
218
- attr.fields(Model)._package,
219
- attr.fields(Model)._dataset,
220
- attr.fields(Model)._ontology,
221
- attr.fields(Model)._repositories,
222
- attr.fields(Model)._feature_set,
223
- attr.fields(Model)._client_api,
224
- attr.fields(Model).package_id,
225
- attr.fields(Model).project_id,
226
- attr.fields(Model).dataset_id,
227
- attr.fields(Model).ontology_id,
228
- attr.fields(Model).model_artifacts,
229
- attr.fields(Model).created_at,
230
- attr.fields(Model).updated_at,
231
- attr.fields(Model).input_type,
232
- attr.fields(Model).output_type,
233
- attr.fields(Model).updated_by,
234
- attr.fields(Model).app,
235
- attr.fields(Model).status_logs
236
- ))
237
- _json['packageId'] = self.package_id
238
- _json['datasetId'] = self.dataset_id
239
- _json['createdAt'] = self.created_at
240
- _json['updatedAt'] = self.updated_at
241
- _json['inputType'] = self.input_type
242
- _json['outputType'] = self.output_type
243
- _json['moduleName'] = self.module_name
244
-
245
- model_artifacts = list()
246
- for artifact in self.model_artifacts:
247
- if artifact.type in ['file', 'dir']:
248
- artifact = {'type': 'item',
249
- 'itemId': artifact.id}
250
- else:
251
- artifact = artifact.to_json(as_artifact=True)
252
- model_artifacts.append(artifact)
253
- _json['artifacts'] = model_artifacts
254
-
255
- if self.updated_by:
256
- _json['updatedBy'] = self.updated_by
257
- if self.app:
258
- _json['app'] = self.app
259
- if self.status_logs:
260
- _json['statusLogs'] = self.status_logs
261
-
262
- return _json
263
-
264
- ############
265
- # entities #
266
- ############
267
- @property
268
- def project(self):
269
- if self._project is None:
270
- self._project = self.projects.get(project_id=self.project_id, fetch=None)
271
- self._repositories = self.set_repositories() # update the repos with the new fetched entity
272
- assert isinstance(self._project, entities.Project)
273
- return self._project
274
-
275
- @property
276
- def feature_set(self) -> 'entities.FeatureSet':
277
- if self._feature_set is None:
278
- filters = entities.Filters(field='modelId',
279
- values=self.id,
280
- resource=entities.FiltersResource.FEATURE_SET)
281
- feature_sets = self.project.feature_sets.list(filters=filters)
282
- if feature_sets.items_count > 1:
283
- logger.warning("Found more than one feature set associated with model entity. Returning first result."
284
- "Set feature_set if other feature set entity is needed.")
285
- self._feature_set = feature_sets.items[0]
286
- elif feature_sets.items_count == 1:
287
- self._feature_set = feature_sets.items[0]
288
- else:
289
- self._feature_set = None
290
- return self._feature_set
291
-
292
- @feature_set.setter
293
- def feature_set(self, feature_set: 'entities.FeatureSet'):
294
- if not isinstance(feature_set, entities.FeatureSet):
295
- raise ValueError("feature_set must be of type dl.FeatureSet")
296
- else:
297
- self._feature_set = feature_set
298
-
299
- @property
300
- def package(self):
301
- if self._package is None:
302
- try:
303
- if self.app:
304
- self._package = self.dpks.get_revisions(dpk_id=self.app['dpkId'], version=self.app['dpkVersion'])
305
- else:
306
- self._package = self.packages.get(package_id=self.package_id)
307
- except Exception as e:
308
- error = e
309
- try:
310
- self._package = self.dpks.get(dpk_id=self.package_id)
311
- except Exception:
312
- raise error
313
- self._repositories = self.set_repositories() # update the repos with the new fetched entity
314
- assert isinstance(self._package, (entities.Package, entities.Dpk))
315
- return self._package
316
-
317
- @property
318
- def dataset(self):
319
- if self._dataset is None:
320
- if self.dataset_id is None:
321
- raise RuntimeError("Model {!r} has no dataset. Can be used only for inference".format(self.id))
322
- self._dataset = self.datasets.get(dataset_id=self.dataset_id, fetch=None)
323
- self._repositories = self.set_repositories() # update the repos with the new fetched entity
324
- assert isinstance(self._dataset, entities.Dataset)
325
- return self._dataset
326
-
327
- @property
328
- def ontology(self):
329
- if self._ontology is None:
330
- if self.ontology_id is None:
331
- raise RuntimeError("Model {!r} has no ontology.".format(self.id))
332
- self._ontology = self.ontologies.get(ontology_id=self.ontology_id)
333
- assert isinstance(self._ontology, entities.Ontology)
334
- return self._ontology
335
-
336
- ################
337
- # repositories #
338
- ################
339
- @_repositories.default
340
- def set_repositories(self):
341
- reps = namedtuple('repositories',
342
- field_names=['projects', 'datasets', 'models', 'packages', 'ontologies', 'artifacts',
343
- 'metrics', 'dpks', 'services'])
344
-
345
- r = reps(projects=repositories.Projects(client_api=self._client_api),
346
- datasets=repositories.Datasets(client_api=self._client_api,
347
- project=self._project),
348
- models=repositories.Models(client_api=self._client_api,
349
- project=self._project,
350
- project_id=self.project_id,
351
- package=self._package),
352
- packages=repositories.Packages(client_api=self._client_api,
353
- project=self._project),
354
- ontologies=repositories.Ontologies(client_api=self._client_api,
355
- project=self._project,
356
- dataset=self._dataset),
357
- artifacts=repositories.Artifacts(client_api=self._client_api,
358
- project=self._project,
359
- project_id=self.project_id,
360
- model=self),
361
- metrics=repositories.Metrics(client_api=self._client_api,
362
- model=self),
363
- dpks=repositories.Dpks(client_api=self._client_api),
364
- services=repositories.Services(client_api=self._client_api,
365
- project=self._project,
366
- project_id=self.project_id,
367
- model_id=self.id,
368
- model=self),
369
- )
370
- return r
371
-
372
- @property
373
- def platform_url(self):
374
- return self._client_api._get_resource_url("projects/{}/model/{}".format(self.project_id, self.id))
375
-
376
- @property
377
- def projects(self):
378
- assert isinstance(self._repositories.projects, repositories.Projects)
379
- return self._repositories.projects
380
-
381
- @property
382
- def datasets(self):
383
- assert isinstance(self._repositories.datasets, repositories.Datasets)
384
- return self._repositories.datasets
385
-
386
- @property
387
- def models(self):
388
- assert isinstance(self._repositories.models, repositories.Models)
389
- return self._repositories.models
390
-
391
- @property
392
- def packages(self):
393
- assert isinstance(self._repositories.packages, repositories.Packages)
394
- return self._repositories.packages
395
-
396
- @property
397
- def dpks(self):
398
- assert isinstance(self._repositories.dpks, repositories.Dpks)
399
- return self._repositories.dpks
400
-
401
- @property
402
- def ontologies(self):
403
- assert isinstance(self._repositories.ontologies, repositories.Ontologies)
404
- return self._repositories.ontologies
405
-
406
- @property
407
- def artifacts(self):
408
- assert isinstance(self._repositories.artifacts, repositories.Artifacts)
409
- return self._repositories.artifacts
410
-
411
- @property
412
- def metrics(self):
413
- assert isinstance(self._repositories.metrics, repositories.Metrics)
414
- return self._repositories.metrics
415
-
416
- @property
417
- def services(self):
418
- assert isinstance(self._repositories.services, repositories.Services)
419
- return self._repositories.services
420
-
421
- @property
422
- def id_to_label_map(self):
423
- # default
424
- if 'id_to_label_map' not in self.configuration:
425
- if not (self.dataset_id == 'null' or self.dataset_id is None):
426
- self.labels = [label.tag for label in self.dataset.labels]
427
- self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
428
- # use existing
429
- else:
430
- self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
431
- self.configuration['id_to_label_map'].items()}
432
- return self.configuration['id_to_label_map']
433
-
434
- @id_to_label_map.setter
435
- def id_to_label_map(self, mapping: dict):
436
- self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in mapping.items()}
437
-
438
- @property
439
- def label_to_id_map(self):
440
- if 'label_to_id_map' not in self.configuration:
441
- self.configuration['label_to_id_map'] = {v: int(k) for k, v in self.id_to_label_map.items()}
442
- return self.configuration['label_to_id_map']
443
-
444
- @label_to_id_map.setter
445
- def label_to_id_map(self, mapping: dict):
446
- self.configuration['label_to_id_map'] = {v: int(k) for k, v in mapping.items()}
447
-
448
- ###########
449
- # methods #
450
- ###########
451
-
452
- def add_subset(
453
- self, subset_name: str, subset_filter=None, subset_annotation_filter=None
454
- ):
455
- """
456
- Adds a subset for the model, specifying a subset of the model's dataset that could be used for training or
457
- validation. Optionally also adds an annotations subset.
458
-
459
- :param str subset_name: the name of the subset
460
- :param subset_filter: filtering for items subset. Can be `entities.Filters`, `dict`, or `None`
461
- :param subset_annotation_filter: optional filtering for annotations subset. Can be `entities.Filters`, `dict`, or `None`
462
-
463
- """
464
- self.models.add_subset(self, subset_name, subset_filter, subset_annotation_filter)
465
-
466
- def delete_subset(self, subset_name: str):
467
- """
468
- Removes a subset from the model's metadata (both subsets and annotationsSubsets).
469
-
470
- :param str subset_name: the name of the subset
471
-
472
- """
473
- self.models.delete_subset(self, subset_name)
474
-
475
- def update(self, system_metadata=False, reload_services=True):
476
- """
477
- Update Models changes to platform
478
-
479
- :param bool system_metadata: bool - True, if you want to change metadata system
480
- :param bool reload_services: bool - True, if you want to update the services with the new model
481
- :return: Models entity
482
- """
483
- return self.models.update(model=self,
484
- system_metadata=system_metadata,
485
- reload_services=reload_services
486
- )
487
-
488
- def open_in_web(self):
489
- """
490
- Open the model in web platform
491
-
492
- :return:
493
- """
494
- self._client_api._open_in_web(url=self.platform_url)
495
-
496
- def delete(self):
497
- """
498
- Delete Model object
499
-
500
- :return: True
501
- """
502
- return self.models.delete(model=self)
503
-
504
- def clone(self,
505
- model_name: str,
506
- dataset: entities.Dataset = None,
507
- configuration: dict = None,
508
- status=None,
509
- scope=None,
510
- project_id: str = None,
511
- labels: list = None,
512
- description: str = None,
513
- tags: list = None,
514
- train_filter: entities.Filters = None,
515
- validation_filter: entities.Filters = None,
516
- annotations_train_filter: entities.Filters = None,
517
- annotations_validation_filter: entities.Filters = None,
518
- wait=True
519
- ):
520
- """
521
- Clones and creates a new model out of existing one
522
-
523
- :param str model_name: `str` new model name
524
- :param str dataset: dataset object for the cloned model
525
- :param dict configuration: `dict` (optional) if passed replaces the current configuration
526
- :param str status: `str` (optional) set the new status
527
- :param str scope: `str` (optional) set the new scope. default is "project"
528
- :param str project_id: `str` specify the project id to create the new model on (if other than the source model)
529
- :param list labels: `list` of `str` - label of the model
530
- :param str description: `str` description of the new model
531
- :param list tags: `list` of `str` - label of the model
532
- :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
533
- :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
534
- :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
535
- :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
536
- :param bool wait: `bool` wait for the model to be ready before returning
537
-
538
- :return: dl.Model which is a clone version of the existing model
539
- """
540
- return self.models.clone(from_model=self,
541
- model_name=model_name,
542
- project_id=project_id,
543
- dataset=dataset,
544
- scope=scope,
545
- status=status,
546
- configuration=configuration,
547
- labels=labels,
548
- description=description,
549
- tags=tags,
550
- train_filter=train_filter,
551
- validation_filter=validation_filter,
552
- annotations_train_filter=annotations_train_filter,
553
- annotations_validation_filter=annotations_validation_filter,
554
- wait=wait
555
- )
556
-
557
- def train(self, service_config=None):
558
- """
559
- Train the model in the cloud. This will create a service and will run the adapter's train function as an execution
560
-
561
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
562
- :return:
563
- """
564
- return self.models.train(model_id=self.id, service_config=service_config)
565
-
566
- def evaluate(self, dataset_id, filters: entities.Filters = None, service_config=None):
567
- """
568
- Evaluate Model, provide data to evaluate the model on You can also provide specific config for the deployed service
569
-
570
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
571
- :param str dataset_id: ID of the dataset to evaluate
572
- :param entities.Filters filters: dl.Filter entity to run the predictions on
573
- :return:
574
- """
575
- return self.models.evaluate(model_id=self.id,
576
- dataset_id=dataset_id,
577
- filters=filters,
578
- service_config=service_config)
579
-
580
- def predict(self, item_ids=None, dataset_id=None):
581
- """
582
- Run model prediction with items
583
-
584
- :param item_ids: a list of item id to run the prediction.
585
- :param dataset_id: dataset id to run the prediction on
586
- :return:
587
- """
588
- return self.models.predict(model=self, item_ids=item_ids, dataset_id=dataset_id)
589
-
590
- def embed(self, item_ids):
591
- """
592
- Run model embed with items
593
-
594
- :param item_ids: a list of item id to run the embed.
595
- :return:
596
- """
597
- return self.models.embed(model=self, item_ids=item_ids)
598
-
599
- def embed_datasets(self, dataset_ids, attach_trigger=False):
600
- """
601
- Run model embed with datasets
602
-
603
- :param dataset_ids: a list of dataset id to run the embed.
604
- :param attach_trigger: bool - True, if you want to activate the trigger
605
- :return:
606
- """
607
- return self.models.embed_datasets(model=self, dataset_ids=dataset_ids, attach_trigger=attach_trigger)
608
-
609
- def deploy(self, service_config=None) -> entities.Service:
610
- """
611
- Deploy a trained model. This will create a service that will execute predictions
612
-
613
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
614
-
615
- :return: dl.Service: The deployed service
616
- """
617
- return self.models.deploy(model_id=self.id, service_config=service_config)
618
-
619
- def wait_for_model_ready(self):
620
- """
621
- Wait for model to be ready
622
-
623
- :return:
624
- """
625
- return self.models.wait_for_model_ready(model=self)
626
-
627
- def log(self,
628
- service=None,
629
- size=None,
630
- checkpoint=None,
631
- start=None,
632
- end=None,
633
- follow=False,
634
- text=None,
635
- execution_id=None,
636
- function_name=None,
637
- replica_id=None,
638
- system=False,
639
- view=True,
640
- until_completed=True,
641
- model_operation: str = None,
642
- ):
643
- """
644
- Get service logs
645
-
646
- :param service: service object
647
- :param int size: size
648
- :param dict checkpoint: the information from the lst point checked in the service
649
- :param str start: iso format time
650
- :param str end: iso format time
651
- :param bool follow: if true, keep stream future logs
652
- :param str text: text
653
- :param str execution_id: execution id
654
- :param str function_name: function name
655
- :param str replica_id: replica id
656
- :param bool system: system
657
- :param bool view: if true, print out all the logs
658
- :param bool until_completed: wait until completed
659
- :param str model_operation: model operation action
660
- :return: ServiceLog entity
661
- :rtype: ServiceLog
662
-
663
- **Example**:
664
-
665
- .. code-block:: python
666
-
667
- service_log = service.log()
668
- """
669
- return self.services.log(service=service,
670
- size=size,
671
- checkpoint=checkpoint,
672
- start=start,
673
- end=end,
674
- follow=follow,
675
- execution_id=execution_id,
676
- function_name=function_name,
677
- replica_id=replica_id,
678
- system=system,
679
- text=text,
680
- view=view,
681
- until_completed=until_completed,
682
- model_id=self.id,
683
- model_operation=model_operation,
684
- project_id=self.project_id)
1
+ from collections import namedtuple
2
+ from enum import Enum
3
+ import traceback
4
+ import logging
5
+ import attr
6
+ from .. import repositories, entities
7
+ from ..services.api_client import ApiClient
8
+
9
+ logger = logging.getLogger(name='dtlpy')
10
+
11
+
12
+ class DatasetSubsetType(str, Enum):
13
+ """Available types for dataset subsets"""
14
+ TRAIN = 'train'
15
+ VALIDATION = 'validation'
16
+ TEST = 'test'
17
+
18
+
19
+ class ModelStatus(str, Enum):
20
+ """Available types for model status"""
21
+ CREATED = "created",
22
+ PRE_TRAINED = "pre-trained",
23
+ PENDING = "pending",
24
+ TRAINING = "training",
25
+ TRAINED = "trained",
26
+ DEPLOYED = "deployed",
27
+ FAILED = "failed",
28
+ CLONING = "cloning"
29
+
30
+
31
+ class PlotSample:
32
+ def __init__(self, figure, legend, x, y):
33
+ """
34
+ Create a single metric sample for Model
35
+
36
+ :param figure: figure name identifier
37
+ :param legend: line name identifier
38
+ :param x: x value for the current sample
39
+ :param y: y value for the current sample
40
+ """
41
+ self.figure = figure
42
+ self.legend = legend
43
+ self.x = x
44
+ self.y = y
45
+
46
+ def to_json(self) -> dict:
47
+ _json = {'figure': self.figure,
48
+ 'legend': self.legend,
49
+ 'data': {'x': self.x,
50
+ 'y': self.y}}
51
+ return _json
52
+
53
+
54
+ # class MatrixSample:
55
+ # def __init__(self, figure, legend, x, y):
56
+ # """
57
+ # Create a single metric sample for Model
58
+ #
59
+ # :param figure: figure name identifier
60
+ # :param legend: line name identifier
61
+ # :param x: x value for the current sample
62
+ # :param y: y value for the current sample
63
+ # """
64
+ # self.figure = figure
65
+ # self.legend = legend
66
+ # self.x = x
67
+ # self.y = y
68
+ #
69
+ # def to_json(self) -> dict:
70
+ # _json = {'figure': self.figure,
71
+ # 'legend': self.legend,
72
+ # 'data': {'x': self.x,
73
+ # 'y': self.y}}
74
+ # return _json
75
+
76
+
77
+ @attr.s
78
+ class Model(entities.BaseEntity):
79
+ """
80
+ Model object
81
+ """
82
+ # platform
83
+ id = attr.ib()
84
+ creator = attr.ib()
85
+ created_at = attr.ib()
86
+ updated_at = attr.ib(repr=False)
87
+ model_artifacts = attr.ib()
88
+ name = attr.ib()
89
+ description = attr.ib()
90
+ ontology_id = attr.ib(repr=False)
91
+ labels = attr.ib()
92
+ status = attr.ib()
93
+ tags = attr.ib()
94
+ configuration = attr.ib()
95
+ metadata = attr.ib()
96
+ input_type = attr.ib()
97
+ output_type = attr.ib()
98
+ module_name = attr.ib()
99
+
100
+ url = attr.ib()
101
+ scope = attr.ib()
102
+ version = attr.ib()
103
+ context = attr.ib()
104
+ status_logs = attr.ib()
105
+
106
+ # name change
107
+ package_id = attr.ib(repr=False)
108
+ project_id = attr.ib()
109
+ dataset_id = attr.ib(repr=False)
110
+
111
+ # sdk
112
+ _project = attr.ib(repr=False)
113
+ _package = attr.ib(repr=False)
114
+ _dataset = attr.ib(repr=False)
115
+ _feature_set = attr.ib(repr=False)
116
+ _client_api = attr.ib(type=ApiClient, repr=False)
117
+ _repositories = attr.ib(repr=False)
118
+ _ontology = attr.ib(repr=False, default=None)
119
+ updated_by = attr.ib(default=None)
120
+ app = attr.ib(default=None)
121
+
122
+ @staticmethod
123
+ def _protected_from_json(_json, client_api, project=None, package=None, is_fetched=True):
124
+ """
125
+ Same as from_json but with try-except to catch if error
126
+
127
+ :param _json: platform representation of Model
128
+ :param client_api: ApiClient entity
129
+ :param project: project that owns the model
130
+ :param package: package entity of the model
131
+ :param is_fetched: is Entity fetched from Platform
132
+ :return: Model entity
133
+ """
134
+ try:
135
+ model = Model.from_json(_json=_json,
136
+ client_api=client_api,
137
+ project=project,
138
+ package=package,
139
+ is_fetched=is_fetched)
140
+ status = True
141
+ except Exception:
142
+ model = traceback.format_exc()
143
+ status = False
144
+ return status, model
145
+
146
+ @classmethod
147
+ def from_json(cls, _json, client_api, project=None, package=None, is_fetched=True):
148
+ """
149
+ Turn platform representation of model into a model entity
150
+
151
+ :param _json: platform representation of model
152
+ :param client_api: ApiClient entity
153
+ :param project: project that owns the model
154
+ :param package: package entity of the model
155
+ :param is_fetched: is Entity fetched from Platform
156
+ :return: Model entity
157
+ """
158
+ if project is not None:
159
+ if project.id != _json.get('context', {}).get('project', None):
160
+ logger.warning("Model's project is different then the input project")
161
+ project = None
162
+
163
+ if package is not None:
164
+ if package.id != _json.get('packageId', None):
165
+ logger.warning("Model's package is different then the input package")
166
+ package = None
167
+
168
+ model_artifacts = [entities.Artifact.from_json(_json=artifact,
169
+ client_api=client_api,
170
+ project=project)
171
+ for artifact in _json.get('artifacts', list())]
172
+
173
+ inst = cls(
174
+ configuration=_json.get('configuration', None),
175
+ description=_json.get('description', None),
176
+ status=_json.get('status', None),
177
+ tags=_json.get('tags', None),
178
+ metadata=_json.get('metadata', dict()),
179
+ project_id=_json.get('context', {}).get('project', None),
180
+ dataset_id=_json.get('datasetId', None),
181
+ package_id=_json.get('packageId', None),
182
+ model_artifacts=model_artifacts,
183
+ labels=_json.get('labels', None),
184
+ ontology_id=_json.get('ontology_id', None),
185
+ created_at=_json.get('createdAt', None),
186
+ updated_at=_json.get('updatedAt', None),
187
+ creator=_json.get('context', {}).get('creator', None),
188
+ client_api=client_api,
189
+ name=_json.get('name', None),
190
+ project=project,
191
+ package=package,
192
+ dataset=None,
193
+ feature_set=None,
194
+ id=_json.get('id', None),
195
+ url=_json.get('url', None),
196
+ scope=_json.get('scope', entities.EntityScopeLevel.PROJECT),
197
+ version=_json.get('version', '1.0.0'),
198
+ context=_json.get('context', {}),
199
+ input_type=_json.get('inputType', None),
200
+ output_type=_json.get('outputType', None),
201
+ module_name=_json.get('moduleName', None),
202
+ updated_by=_json.get('updatedBy', None),
203
+ app=_json.get('app', None),
204
+ status_logs=_json.get('statusLogs', []),
205
+ )
206
+ inst.is_fetched = is_fetched
207
+ return inst
208
+
209
+ def to_json(self):
210
+ """
211
+ Get the dict of Model
212
+
213
+ :return: platform json of model
214
+ :rtype: dict
215
+ """
216
+ _json = attr.asdict(self,
217
+ filter=attr.filters.exclude(attr.fields(Model)._project,
218
+ attr.fields(Model)._package,
219
+ attr.fields(Model)._dataset,
220
+ attr.fields(Model)._ontology,
221
+ attr.fields(Model)._repositories,
222
+ attr.fields(Model)._feature_set,
223
+ attr.fields(Model)._client_api,
224
+ attr.fields(Model).package_id,
225
+ attr.fields(Model).project_id,
226
+ attr.fields(Model).dataset_id,
227
+ attr.fields(Model).ontology_id,
228
+ attr.fields(Model).model_artifacts,
229
+ attr.fields(Model).created_at,
230
+ attr.fields(Model).updated_at,
231
+ attr.fields(Model).input_type,
232
+ attr.fields(Model).output_type,
233
+ attr.fields(Model).updated_by,
234
+ attr.fields(Model).app,
235
+ attr.fields(Model).status_logs
236
+ ))
237
+ _json['packageId'] = self.package_id
238
+ _json['datasetId'] = self.dataset_id
239
+ _json['createdAt'] = self.created_at
240
+ _json['updatedAt'] = self.updated_at
241
+ _json['inputType'] = self.input_type
242
+ _json['outputType'] = self.output_type
243
+ _json['moduleName'] = self.module_name
244
+
245
+ model_artifacts = list()
246
+ for artifact in self.model_artifacts:
247
+ if artifact.type in ['file', 'dir']:
248
+ artifact = {'type': 'item',
249
+ 'itemId': artifact.id}
250
+ else:
251
+ artifact = artifact.to_json(as_artifact=True)
252
+ model_artifacts.append(artifact)
253
+ _json['artifacts'] = model_artifacts
254
+
255
+ if self.updated_by:
256
+ _json['updatedBy'] = self.updated_by
257
+ if self.app:
258
+ _json['app'] = self.app
259
+ if self.status_logs:
260
+ _json['statusLogs'] = self.status_logs
261
+
262
+ return _json
263
+
264
+ ############
265
+ # entities #
266
+ ############
267
+ @property
268
+ def project(self):
269
+ if self._project is None:
270
+ self._project = self.projects.get(project_id=self.project_id, fetch=None)
271
+ self._repositories = self.set_repositories() # update the repos with the new fetched entity
272
+ assert isinstance(self._project, entities.Project)
273
+ return self._project
274
+
275
+ @property
276
+ def feature_set(self) -> 'entities.FeatureSet':
277
+ if self._feature_set is None:
278
+ filters = entities.Filters(field='modelId',
279
+ values=self.id,
280
+ resource=entities.FiltersResource.FEATURE_SET)
281
+ feature_sets = self.project.feature_sets.list(filters=filters)
282
+ if feature_sets.items_count > 1:
283
+ logger.warning("Found more than one feature set associated with model entity. Returning first result."
284
+ "Set feature_set if other feature set entity is needed.")
285
+ self._feature_set = feature_sets.items[0]
286
+ elif feature_sets.items_count == 1:
287
+ self._feature_set = feature_sets.items[0]
288
+ else:
289
+ self._feature_set = None
290
+ return self._feature_set
291
+
292
+ @feature_set.setter
293
+ def feature_set(self, feature_set: 'entities.FeatureSet'):
294
+ if not isinstance(feature_set, entities.FeatureSet):
295
+ raise ValueError("feature_set must be of type dl.FeatureSet")
296
+ else:
297
+ self._feature_set = feature_set
298
+
299
+ @property
300
+ def package(self):
301
+ if self._package is None:
302
+ try:
303
+ if self.app:
304
+ self._package = self.dpks.get_revisions(dpk_id=self.app['dpkId'], version=self.app['dpkVersion'])
305
+ else:
306
+ self._package = self.packages.get(package_id=self.package_id)
307
+ except Exception as e:
308
+ error = e
309
+ try:
310
+ self._package = self.dpks.get(dpk_id=self.package_id)
311
+ except Exception:
312
+ raise error
313
+ self._repositories = self.set_repositories() # update the repos with the new fetched entity
314
+ assert isinstance(self._package, (entities.Package, entities.Dpk))
315
+ return self._package
316
+
317
+ @property
318
+ def dataset(self):
319
+ if self._dataset is None:
320
+ if self.dataset_id is None:
321
+ raise RuntimeError("Model {!r} has no dataset. Can be used only for inference".format(self.id))
322
+ self._dataset = self.datasets.get(dataset_id=self.dataset_id, fetch=None)
323
+ self._repositories = self.set_repositories() # update the repos with the new fetched entity
324
+ assert isinstance(self._dataset, entities.Dataset)
325
+ return self._dataset
326
+
327
+ @property
328
+ def ontology(self):
329
+ if self._ontology is None:
330
+ if self.ontology_id is None:
331
+ raise RuntimeError("Model {!r} has no ontology.".format(self.id))
332
+ self._ontology = self.ontologies.get(ontology_id=self.ontology_id)
333
+ assert isinstance(self._ontology, entities.Ontology)
334
+ return self._ontology
335
+
336
+ ################
337
+ # repositories #
338
+ ################
339
+ @_repositories.default
340
+ def set_repositories(self):
341
+ reps = namedtuple('repositories',
342
+ field_names=['projects', 'datasets', 'models', 'packages', 'ontologies', 'artifacts',
343
+ 'metrics', 'dpks', 'services'])
344
+
345
+ r = reps(projects=repositories.Projects(client_api=self._client_api),
346
+ datasets=repositories.Datasets(client_api=self._client_api,
347
+ project=self._project),
348
+ models=repositories.Models(client_api=self._client_api,
349
+ project=self._project,
350
+ project_id=self.project_id,
351
+ package=self._package),
352
+ packages=repositories.Packages(client_api=self._client_api,
353
+ project=self._project),
354
+ ontologies=repositories.Ontologies(client_api=self._client_api,
355
+ project=self._project,
356
+ dataset=self._dataset),
357
+ artifacts=repositories.Artifacts(client_api=self._client_api,
358
+ project=self._project,
359
+ project_id=self.project_id,
360
+ model=self),
361
+ metrics=repositories.Metrics(client_api=self._client_api,
362
+ model=self),
363
+ dpks=repositories.Dpks(client_api=self._client_api),
364
+ services=repositories.Services(client_api=self._client_api,
365
+ project=self._project,
366
+ project_id=self.project_id,
367
+ model_id=self.id,
368
+ model=self),
369
+ )
370
+ return r
371
+
372
+ @property
373
+ def platform_url(self):
374
+ return self._client_api._get_resource_url("projects/{}/model/{}".format(self.project_id, self.id))
375
+
376
+ @property
377
+ def projects(self):
378
+ assert isinstance(self._repositories.projects, repositories.Projects)
379
+ return self._repositories.projects
380
+
381
+ @property
382
+ def datasets(self):
383
+ assert isinstance(self._repositories.datasets, repositories.Datasets)
384
+ return self._repositories.datasets
385
+
386
+ @property
387
+ def models(self):
388
+ assert isinstance(self._repositories.models, repositories.Models)
389
+ return self._repositories.models
390
+
391
+ @property
392
+ def packages(self):
393
+ assert isinstance(self._repositories.packages, repositories.Packages)
394
+ return self._repositories.packages
395
+
396
+ @property
397
+ def dpks(self):
398
+ assert isinstance(self._repositories.dpks, repositories.Dpks)
399
+ return self._repositories.dpks
400
+
401
+ @property
402
+ def ontologies(self):
403
+ assert isinstance(self._repositories.ontologies, repositories.Ontologies)
404
+ return self._repositories.ontologies
405
+
406
+ @property
407
+ def artifacts(self):
408
+ assert isinstance(self._repositories.artifacts, repositories.Artifacts)
409
+ return self._repositories.artifacts
410
+
411
+ @property
412
+ def metrics(self):
413
+ assert isinstance(self._repositories.metrics, repositories.Metrics)
414
+ return self._repositories.metrics
415
+
416
+ @property
417
+ def services(self):
418
+ assert isinstance(self._repositories.services, repositories.Services)
419
+ return self._repositories.services
420
+
421
+ @property
422
+ def id_to_label_map(self):
423
+ # default
424
+ if 'id_to_label_map' not in self.configuration:
425
+ if not (self.dataset_id == 'null' or self.dataset_id is None):
426
+ self.labels = [label.tag for label in self.dataset.labels]
427
+ self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
428
+ # use existing
429
+ else:
430
+ self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
431
+ self.configuration['id_to_label_map'].items()}
432
+ return self.configuration['id_to_label_map']
433
+
434
+ @id_to_label_map.setter
435
+ def id_to_label_map(self, mapping: dict):
436
+ self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in mapping.items()}
437
+
438
+ @property
439
+ def label_to_id_map(self):
440
+ if 'label_to_id_map' not in self.configuration:
441
+ self.configuration['label_to_id_map'] = {v: int(k) for k, v in self.id_to_label_map.items()}
442
+ return self.configuration['label_to_id_map']
443
+
444
+ @label_to_id_map.setter
445
+ def label_to_id_map(self, mapping: dict):
446
+ self.configuration['label_to_id_map'] = {v: int(k) for k, v in mapping.items()}
447
+
448
+ ###########
449
+ # methods #
450
+ ###########
451
+
452
+ def add_subset(
453
+ self, subset_name: str, subset_filter=None, subset_annotation_filter=None
454
+ ):
455
+ """
456
+ Adds a subset for the model, specifying a subset of the model's dataset that could be used for training or
457
+ validation. Optionally also adds an annotations subset.
458
+
459
+ :param str subset_name: the name of the subset
460
+ :param subset_filter: filtering for items subset. Can be `entities.Filters`, `dict`, or `None`
461
+ :param subset_annotation_filter: optional filtering for annotations subset. Can be `entities.Filters`, `dict`, or `None`
462
+
463
+ """
464
+ self.models.add_subset(self, subset_name, subset_filter, subset_annotation_filter)
465
+
466
+ def delete_subset(self, subset_name: str):
467
+ """
468
+ Removes a subset from the model's metadata (both subsets and annotationsSubsets).
469
+
470
+ :param str subset_name: the name of the subset
471
+
472
+ """
473
+ self.models.delete_subset(self, subset_name)
474
+
475
+ def update(self, system_metadata=False, reload_services=True):
476
+ """
477
+ Update Models changes to platform
478
+
479
+ :param bool system_metadata: bool - True, if you want to change metadata system
480
+ :param bool reload_services: bool - True, if you want to update the services with the new model
481
+ :return: Models entity
482
+ """
483
+ return self.models.update(model=self,
484
+ system_metadata=system_metadata,
485
+ reload_services=reload_services
486
+ )
487
+
488
+ def open_in_web(self):
489
+ """
490
+ Open the model in web platform
491
+
492
+ :return:
493
+ """
494
+ self._client_api._open_in_web(url=self.platform_url)
495
+
496
+ def delete(self):
497
+ """
498
+ Delete Model object
499
+
500
+ :return: True
501
+ """
502
+ return self.models.delete(model=self)
503
+
504
+ def clone(self,
505
+ model_name: str,
506
+ dataset: entities.Dataset = None,
507
+ configuration: dict = None,
508
+ status=None,
509
+ scope=None,
510
+ project_id: str = None,
511
+ labels: list = None,
512
+ description: str = None,
513
+ tags: list = None,
514
+ train_filter: entities.Filters = None,
515
+ validation_filter: entities.Filters = None,
516
+ annotations_train_filter: entities.Filters = None,
517
+ annotations_validation_filter: entities.Filters = None,
518
+ wait=True
519
+ ):
520
+ """
521
+ Clones and creates a new model out of existing one
522
+
523
+ :param str model_name: `str` new model name
524
+ :param str dataset: dataset object for the cloned model
525
+ :param dict configuration: `dict` (optional) if passed replaces the current configuration
526
+ :param str status: `str` (optional) set the new status
527
+ :param str scope: `str` (optional) set the new scope. default is "project"
528
+ :param str project_id: `str` specify the project id to create the new model on (if other than the source model)
529
+ :param list labels: `list` of `str` - label of the model
530
+ :param str description: `str` description of the new model
531
+ :param list tags: `list` of `str` - label of the model
532
+ :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
533
+ :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
534
+ :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
535
+ :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
536
+ :param bool wait: `bool` wait for the model to be ready before returning
537
+
538
+ :return: dl.Model which is a clone version of the existing model
539
+ """
540
+ return self.models.clone(from_model=self,
541
+ model_name=model_name,
542
+ project_id=project_id,
543
+ dataset=dataset,
544
+ scope=scope,
545
+ status=status,
546
+ configuration=configuration,
547
+ labels=labels,
548
+ description=description,
549
+ tags=tags,
550
+ train_filter=train_filter,
551
+ validation_filter=validation_filter,
552
+ annotations_train_filter=annotations_train_filter,
553
+ annotations_validation_filter=annotations_validation_filter,
554
+ wait=wait
555
+ )
556
+
557
+ def train(self, service_config=None):
558
+ """
559
+ Train the model in the cloud. This will create a service and will run the adapter's train function as an execution
560
+
561
+ :param dict service_config : Service object as dict. Contains the spec of the default service to create.
562
+ :return:
563
+ """
564
+ return self.models.train(model_id=self.id, service_config=service_config)
565
+
566
+ def evaluate(self, dataset_id, filters: entities.Filters = None, service_config=None):
567
+ """
568
+ Evaluate Model, provide data to evaluate the model on You can also provide specific config for the deployed service
569
+
570
+ :param dict service_config : Service object as dict. Contains the spec of the default service to create.
571
+ :param str dataset_id: ID of the dataset to evaluate
572
+ :param entities.Filters filters: dl.Filter entity to run the predictions on
573
+ :return:
574
+ """
575
+ return self.models.evaluate(model_id=self.id,
576
+ dataset_id=dataset_id,
577
+ filters=filters,
578
+ service_config=service_config)
579
+
580
+ def predict(self, item_ids=None, dataset_id=None):
581
+ """
582
+ Run model prediction with items
583
+
584
+ :param item_ids: a list of item id to run the prediction.
585
+ :param dataset_id: dataset id to run the prediction on
586
+ :return:
587
+ """
588
+ return self.models.predict(model=self, item_ids=item_ids, dataset_id=dataset_id)
589
+
590
+ def embed(self, item_ids):
591
+ """
592
+ Run model embed with items
593
+
594
+ :param item_ids: a list of item id to run the embed.
595
+ :return:
596
+ """
597
+ return self.models.embed(model=self, item_ids=item_ids)
598
+
599
+ def embed_datasets(self, dataset_ids, attach_trigger=False):
600
+ """
601
+ Run model embed with datasets
602
+
603
+ :param dataset_ids: a list of dataset id to run the embed.
604
+ :param attach_trigger: bool - True, if you want to activate the trigger
605
+ :return:
606
+ """
607
+ return self.models.embed_datasets(model=self, dataset_ids=dataset_ids, attach_trigger=attach_trigger)
608
+
609
+ def deploy(self, service_config=None) -> entities.Service:
610
+ """
611
+ Deploy a trained model. This will create a service that will execute predictions
612
+
613
+ :param dict service_config : Service object as dict. Contains the spec of the default service to create.
614
+
615
+ :return: dl.Service: The deployed service
616
+ """
617
+ return self.models.deploy(model_id=self.id, service_config=service_config)
618
+
619
+ def wait_for_model_ready(self):
620
+ """
621
+ Wait for model to be ready
622
+
623
+ :return:
624
+ """
625
+ return self.models.wait_for_model_ready(model=self)
626
+
627
+ def log(self,
628
+ service=None,
629
+ size=None,
630
+ checkpoint=None,
631
+ start=None,
632
+ end=None,
633
+ follow=False,
634
+ text=None,
635
+ execution_id=None,
636
+ function_name=None,
637
+ replica_id=None,
638
+ system=False,
639
+ view=True,
640
+ until_completed=True,
641
+ model_operation: str = None,
642
+ ):
643
+ """
644
+ Get service logs
645
+
646
+ :param service: service object
647
+ :param int size: size
648
+ :param dict checkpoint: the information from the lst point checked in the service
649
+ :param str start: iso format time
650
+ :param str end: iso format time
651
+ :param bool follow: if true, keep stream future logs
652
+ :param str text: text
653
+ :param str execution_id: execution id
654
+ :param str function_name: function name
655
+ :param str replica_id: replica id
656
+ :param bool system: system
657
+ :param bool view: if true, print out all the logs
658
+ :param bool until_completed: wait until completed
659
+ :param str model_operation: model operation action
660
+ :return: ServiceLog entity
661
+ :rtype: ServiceLog
662
+
663
+ **Example**:
664
+
665
+ .. code-block:: python
666
+
667
+ service_log = service.log()
668
+ """
669
+ return self.services.log(service=service,
670
+ size=size,
671
+ checkpoint=checkpoint,
672
+ start=start,
673
+ end=end,
674
+ follow=follow,
675
+ execution_id=execution_id,
676
+ function_name=function_name,
677
+ replica_id=replica_id,
678
+ system=system,
679
+ text=text,
680
+ view=view,
681
+ until_completed=until_completed,
682
+ model_id=self.id,
683
+ model_operation=model_operation,
684
+ project_id=self.project_id)
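For orientation, the Model entity methods visible in the diff above (clone, train, deploy, predict, wait_for_model_ready) are typically exercised along these lines. This is a minimal sketch only: it assumes an authenticated dtlpy session, and the project, dataset, model, and item names below ('my-project', 'my-dataset', 'my-model', 'my-model-clone', '<item-id>') are placeholders, not values taken from this release.

import dtlpy as dl

# Placeholder names; substitute your own project, dataset, and model.
project = dl.projects.get(project_name='my-project')
model = project.models.get(model_name='my-model')

# Clone the model onto a dataset before training (clone signature as shown in the diff above).
cloned = model.clone(model_name='my-model-clone',
                     dataset=project.datasets.get(dataset_name='my-dataset'))

# Train, wait for readiness, deploy, and run a prediction using the entity methods defined in model.py.
cloned.train()
cloned.wait_for_model_ready()
service = cloned.deploy()
execution = cloned.predict(item_ids=['<item-id>'])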