dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +152 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +975 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +974 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1287 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1585 -1504
  167. dtlpy/repositories/downloader.py +1157 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +256 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +429 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1786 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.117.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
dtlpy/repositories/models.py
@@ -1,1000 +1,1000 @@
1
- import time
2
- from typing import List
3
- import logging
4
- from urllib.parse import urlencode
5
-
6
- from .. import entities, repositories, exceptions, miscellaneous
7
- from ..services.api_client import ApiClient
8
-
9
- logger = logging.getLogger(name='dtlpy')
10
-
11
- MIN_INTERVAL = 1
12
- BACKOFF_FACTOR = 1.2
13
- MAX_INTERVAL = 12
14
-
15
-
16
- class Models:
17
- """
18
- Models Repository
19
- """
20
- @staticmethod
21
- def _filter_to_dict(filter_obj):
22
- """Convert Filters object to dict, or return as-is if already dict/None"""
23
- if filter_obj is not None:
24
- filter_obj = filter_obj.prepare() if isinstance(filter_obj, entities.Filters) else filter_obj
25
- return filter_obj
26
-
27
- @staticmethod
28
- def _get_filter_from_model(model, subset_type, resource_type):
29
- """Extract filter dict from model metadata"""
30
- filter_dict = None
31
- if model is not None:
32
- if resource_type == entities.FiltersResource.ITEM:
33
- filter_dict = model.metadata.get('system', {}).get('subsets', {}).get(subset_type.value)
34
- else: # ANNOTATION
35
- filter_dict = model.metadata.get('system', {}).get('annotationsSubsets', {}).get(subset_type.value)
36
- return filter_dict
37
-
38
- @staticmethod
39
- def _build_model_metadata(
40
- train_filter: entities.Filters = None,
41
- validation_filter: entities.Filters = None,
42
- annotations_train_filter: entities.Filters = None,
43
- annotations_validation_filter: entities.Filters = None,
44
- from_model: entities.Model = None
45
- ) -> dict:
46
- """
47
- Build model metadata with filters, optionally inheriting from existing model.
48
-
49
- :param train_filter: Training data filter (Filters object or dict)
50
- :param validation_filter: Validation data filter (Filters object or dict)
51
- :param annotations_train_filter: Training annotations filter (Filters object or dict)
52
- :param annotations_validation_filter: Validation annotations filter (Filters object or dict)
53
- :param from_model: Source model to inherit filters from (if not provided explicitly)
54
- :return: Metadata dictionary with filters
55
- """
56
- metadata = {'system': {'subsets': {}, 'annotationsSubsets': {}}}
57
-
58
- # Handle item filters
59
- train_filter_dict = Models._filter_to_dict(train_filter)
60
- if train_filter_dict is None and from_model is not None:
61
- train_filter_dict = Models._get_filter_from_model(
62
- model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ITEM)
63
-
64
- validation_filter_dict = Models._filter_to_dict(validation_filter)
65
- if validation_filter_dict is None and from_model is not None:
66
- validation_filter_dict = Models._get_filter_from_model(
67
- model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ITEM)
68
-
69
- # Handle annotation filters
70
- annotations_train_filter_dict = Models._filter_to_dict(annotations_train_filter)
71
- if annotations_train_filter_dict is None and from_model is not None:
72
- annotations_train_filter_dict = Models._get_filter_from_model(
73
- model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ANNOTATION)
74
-
75
- annotations_validation_filter_dict = Models._filter_to_dict(annotations_validation_filter)
76
- if annotations_validation_filter_dict is None and from_model is not None:
77
- annotations_validation_filter_dict = Models._get_filter_from_model(
78
- model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ANNOTATION)
79
-
80
- # Set filters in metadata
81
- if train_filter_dict is not None:
82
- metadata['system']['subsets']['train'] = train_filter_dict
83
- if validation_filter_dict is not None:
84
- metadata['system']['subsets']['validation'] = validation_filter_dict
85
- if annotations_train_filter_dict is not None:
86
- metadata['system']['annotationsSubsets']['train'] = annotations_train_filter_dict
87
- if annotations_validation_filter_dict is not None:
88
- metadata['system']['annotationsSubsets']['validation'] = annotations_validation_filter_dict
89
-
90
- return metadata
91
-
92
- def __init__(self,
93
- client_api: ApiClient,
94
- package: entities.Package = None,
95
- project: entities.Project = None,
96
- project_id: str = None):
97
- self._client_api = client_api
98
- self._project = project
99
- self._package = package
100
- self._project_id = project_id
101
-
102
- if self._project is not None:
103
- self._project_id = self._project.id
104
-
105
- ############
106
- # entities #
107
- ############
108
- @property
109
- def project(self) -> entities.Project:
110
- if self._project is None:
111
- if self._project_id is not None:
112
- projects = repositories.Projects(client_api=self._client_api)
113
- self._project = projects.get(project_id=self._project_id)
114
- if self._project is None:
115
- if self._package is not None:
116
- if self._package._project is not None:
117
- self._project = self._package._project
118
- if self._project is None:
119
- raise exceptions.PlatformException(
120
- error='2001',
121
-                     message='Missing "project". Need to set a Project entity or use the project.models repository')
122
- assert isinstance(self._project, entities.Project)
123
- return self._project
124
-
125
- @project.setter
126
- def project(self, project: entities.Project):
127
- if not isinstance(project, entities.Project):
128
- raise ValueError('Must input a valid Project entity')
129
- self._project = project
130
-
131
- @property
132
- def package(self) -> entities.Package:
133
- if self._package is None:
134
- raise exceptions.PlatformException(
135
- error='2001',
136
- message='Cannot perform action WITHOUT Package entity in {} repository.'.format(
137
- self.__class__.__name__) +
138
- ' Please use package.models or set a model')
139
- assert isinstance(self._package, entities.Package)
140
- return self._package
141
-
142
- ###########
143
- # methods #
144
- ###########
145
- def get(self, model_name=None, model_id=None) -> entities.Model:
146
- """
147
- Get model object
148
-         :param model_name: optional - model name to search by
149
-         :param model_id: optional - model id to get by
150
- :return: dl.Model object
151
- """
152
-
153
- if model_id is not None:
154
- success, response = self._client_api.gen_request(req_type="get",
155
- path="/ml/models/{}".format(model_id))
156
- if not success:
157
- raise exceptions.PlatformException(response)
158
- model = entities.Model.from_json(client_api=self._client_api,
159
- _json=response.json(),
160
- project=self._project,
161
- package=self._package)
162
- # verify input model name is same as the given id
163
- if model_name is not None and model.name != model_name:
164
- logger.warning(
165
-                     "Mismatch found in models.get: model_name is different from model.name:"
166
- " {!r} != {!r}".format(
167
- model_name,
168
- model.name))
169
- elif model_name is not None:
170
-
171
- filters = entities.Filters(
172
- resource=entities.FiltersResource.MODEL,
173
- field='name',
174
- values=model_name
175
- )
176
-
177
- project_id = None
178
-
179
- if self._project is not None:
180
- project_id = self._project.id
181
- elif self._project_id is not None:
182
- project_id = self._project_id
183
-
184
- if project_id is not None:
185
- filters.add(field='projectId', values=project_id)
186
-
187
- if self._package is not None:
188
- filters.add(field='packageId', values=self._package.id)
189
-
190
- models = self.list(filters=filters)
191
-
192
- if models.items_count == 0:
193
- raise exceptions.PlatformException(
194
- error='404',
195
- message='Model not found. Name: {}'.format(model_name))
196
- elif models.items_count > 1:
197
- raise exceptions.PlatformException(
198
- error='400',
199
- message='More than one Model found by the name of: {}. Try "get" by id or "list()".'.format(
200
- model_name))
201
- model = models.items[0]
202
- else:
203
- raise exceptions.PlatformException(
204
- error='400',
205
- message='No checked-out Model was found, must checkout or provide an identifier in inputs')
206
-
207
- return model
208
-
209
- def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Model]:
210
- jobs = [None for _ in range(len(response_items))]
211
- pool = self._client_api.thread_pools(pool_name='entity.create')
212
-
213
-         # build the models list
214
- for i_service, service in enumerate(response_items):
215
- jobs[i_service] = pool.submit(entities.Model._protected_from_json,
216
- **{'client_api': self._client_api,
217
- '_json': service,
218
- 'package': self._package,
219
- 'project': self._project})
220
-
221
- # get all results
222
- results = [j.result() for j in jobs]
223
- # log errors
224
- _ = [logger.warning(r[1]) for r in results if r[0] is False]
225
- # return good jobs
226
- return miscellaneous.List([r[1] for r in results if r[0] is True])
227
-
228
- def _list(self, filters: entities.Filters):
229
- # request
230
- success, response = self._client_api.gen_request(req_type='POST',
231
- path='/ml/models/query',
232
- json_req=filters.prepare())
233
- if not success:
234
- raise exceptions.PlatformException(response)
235
- return response.json()
236
-
237
- def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
238
- """
239
-         List project models
240
-
241
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
242
- :return: Paged entity
243
- :rtype: dtlpy.entities.paged_entities.PagedEntities
244
- """
245
- # default filters
246
- if filters is None:
247
- filters = entities.Filters(resource=entities.FiltersResource.MODEL)
248
- if self._project is not None:
249
- filters.add(field='projectId', values=self._project.id)
250
- if self._package is not None:
251
- filters.add(field='packageId', values=self._package.id)
252
-
253
- # assert type filters
254
- if not isinstance(filters, entities.Filters):
255
- raise exceptions.PlatformException(error='400',
256
- message='Unknown filters type: {!r}'.format(type(filters)))
257
-
258
- if filters.resource != entities.FiltersResource.MODEL:
259
- raise exceptions.PlatformException(
260
- error='400',
261
-                 message='Filters resource must be FiltersResource.MODEL. Got: {!r}'.format(filters.resource))
262
-
263
- paged = entities.PagedEntities(items_repository=self,
264
- filters=filters,
265
- page_offset=filters.page,
266
- page_size=filters.page_size,
267
- client_api=self._client_api)
268
- paged.get_page()
269
- return paged
270
-
271
- def _set_model_filter(self,
272
- metadata: dict,
273
- train_filter: entities.Filters = None,
274
- validation_filter: entities.Filters = None):
275
- if metadata is None:
276
- metadata = {}
277
- if 'system' not in metadata:
278
- metadata['system'] = {}
279
- if 'subsets' not in metadata['system']:
280
- metadata['system']['subsets'] = {}
281
- if train_filter is not None:
282
- metadata['system']['subsets']['train'] = train_filter.prepare() if isinstance(train_filter,
283
- entities.Filters) else train_filter
284
- if validation_filter is not None:
285
- metadata['system']['subsets']['validation'] = validation_filter.prepare() if isinstance(validation_filter,
286
- entities.Filters) else validation_filter
287
- return metadata
288
-
289
- @staticmethod
290
- def add_subset(
291
- model: entities.Model,
292
- subset_name: str,
293
- subset_filter=None,
294
- subset_annotation_filter=None,
295
- ):
296
- """
297
- Adds a subset for a model, specifying a subset of the model's dataset that could be used for training or
298
- validation. Optionally also adds an annotations subset.
299
-
300
- :param dtlpy.entities.Model model: the model to which the subset should be added
301
- :param str subset_name: the name of the subset
302
- :param subset_filter: filtering for items subset. Can be `entities.Filters`, `dict`, or `None`
303
- :param subset_annotation_filter: optional filtering for annotations subset. Can be `entities.Filters`, `dict`, or `None`
304
-
305
- Behavior:
306
- - If both filters are None, no metadata is added/changed.
307
- - If a filter is a dict, it is used as-is (no prepare()).
308
- - If a filter is `entities.Filters`, `.prepare()` is used.
309
- - Only non-None filters are added.
310
- """
311
- if subset_filter is None and subset_annotation_filter is None:
312
- return
313
-
314
- subset_filter_dict = subset_filter.prepare() if isinstance(subset_filter, entities.Filters) else subset_filter
315
- subset_annotation_filter_dict = (
316
- subset_annotation_filter.prepare()
317
- if isinstance(subset_annotation_filter, entities.Filters)
318
- else subset_annotation_filter
319
- )
320
-
321
- # Initialize containers only if needed
322
- if 'system' not in model.metadata:
323
- model.metadata['system'] = dict()
324
- if subset_filter_dict is not None:
325
- if 'subsets' not in model.metadata['system']:
326
- model.metadata['system']['subsets'] = dict()
327
- model.metadata['system']['subsets'][subset_name] = subset_filter_dict
328
-
329
- if subset_annotation_filter_dict is not None:
330
- if 'annotationsSubsets' not in model.metadata['system']:
331
- model.metadata['system']['annotationsSubsets'] = dict()
332
- model.metadata['system']['annotationsSubsets'][subset_name] = subset_annotation_filter_dict
333
-
334
- model.update(system_metadata=True)
335
-
336
- @staticmethod
337
- def delete_subset(model: entities.Model, subset_name: str):
338
- """
339
- Removes a subset from a model's metadata (both subsets and annotationsSubsets).
340
-
341
- :param dtlpy.entities.Model model: the model to which the subset should be added
342
- :param str subset_name: the name of the subset
343
-
344
- **Example**
345
-
346
- .. code-block:: python
347
-
348
- project.models.add_subset(model=model_entity, subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
349
- model_entity.metadata['system']['subsets']
350
- {'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
351
- project.models.delete_subset(model=model_entity, subset_name='train')
352
- model_entity.metadata['system']['subsets']
353
- {}
354
-
355
- """
356
- # Check if subset exists in subsets (for warning)
357
- if model.metadata.get("system", dict()).get("subsets", dict()).get(subset_name) is None:
358
- logger.error(f"Model system metadata incomplete, could not delete subset {subset_name}.")
359
- else:
360
- _ = model.metadata['system']['subsets'].pop(subset_name)
361
-
362
- # Remove from annotationsSubsets if it exists
363
- if model.metadata.get("system", dict()).get("annotationsSubsets", dict()).get(subset_name) is not None:
364
- _ = model.metadata['system']['annotationsSubsets'].pop(subset_name)
365
-
366
- model.update(system_metadata=True)
367
-
368
- def create(
369
- self,
370
- model_name: str,
371
- dataset_id: str = None,
372
- labels: list = None,
373
- ontology_id: str = None,
374
- description: str = None,
375
- model_artifacts: List[entities.Artifact] = None,
376
- project_id=None,
377
- tags: List[str] = None,
378
- package: entities.Package = None,
379
- configuration: dict = None,
380
- status: str = None,
381
- scope: entities.EntityScopeLevel = entities.EntityScopeLevel.PROJECT,
382
- version: str = '1.0.0',
383
- input_type=None,
384
- output_type=None,
385
- train_filter: entities.Filters = None,
386
- validation_filter: entities.Filters = None,
387
- annotations_train_filter: entities.Filters = None,
388
- annotations_validation_filter: entities.Filters = None,
389
- app: entities.App = None
390
- ) -> entities.Model:
391
- """
392
- Create a Model entity
393
-
394
- :param str model_name: name of the model
395
- :param str dataset_id: dataset id
396
-         :param list labels: list of labels from ontology (must match the ontology id); can be a subset
397
- :param str ontology_id: ontology to connect to the model
398
- :param str description: description
399
-         :param model_artifacts: optional list of dl.Artifact. Can be ItemArtifact, LocalArtifact or LinkArtifact
400
- :param str project_id: project that owns the model
401
- :param list tags: list of string tags
402
- :param package: optional - Package object
403
- :param dict configuration: optional - model configuration - dict
404
-         :param str status: optional - the model status
405
- :param str scope: the scope level of the model dl.EntityScopeLevel
406
- :param str version: version of the model
407
- :param str input_type: the file type the model expect as input (image, video, txt, etc)
408
-         :param str output_type: dl.AnnotationType - the type of annotations the model produces (class, box, segment, text, etc.)
409
- :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
410
- :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
411
- :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
412
- :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
413
- :param dtlpy.entities.App app: App entity to connect the model to
414
- :return: Model Entity
415
-
416
- **Example**:
417
-
418
- .. code-block:: python
419
-
420
- project.models.create(model_name='model_name', dataset_id='dataset_id', labels=['label1', 'label2'], train_filter={filter: {$and: [{dir: "/10K short videos"}]},page: 0,pageSize: 1000,resource: "items"}})
421
-
422
- """
423
-
424
- if ontology_id is not None:
425
- # take labels from ontology
426
- ontologies = repositories.Ontologies(client_api=self._client_api)
427
- labels = [label.tag for label in ontologies.get(ontology_id=ontology_id).labels]
428
-
429
- if labels is None:
430
-             # don't have to have labels; can use an empty list
431
- labels = list()
432
-
433
- if input_type is None:
434
- input_type = 'image'
435
-
436
- if output_type is None:
437
- output_type = entities.AnnotationType.CLASSIFICATION
438
-
439
- if package is None and self._package is None:
440
- raise exceptions.PlatformException('Must provide a Package or create from package.models')
441
- elif package is None:
442
- package = self._package
443
-
444
- # TODO need to remove the entire project id user interface - need to take it from dataset id (in BE)
445
- if project_id is None:
446
- if self._project is None:
447
- raise exceptions.PlatformException('Please provide project_id')
448
- project_id = self._project.id
449
- else:
450
- if project_id != self._project_id:
451
- if (isinstance(package, entities.Package) and not package.is_global) or \
452
-                         (isinstance(package, entities.Dpk) and package.scope != 'public'):
453
-                     logger.warning(
454
-                         "Note! You specified project_id {!r}, which is different from the repository context: {!r}".format(
455
- project_id, self._project_id))
456
-
457
- if model_artifacts is None:
458
- model_artifacts = []
459
-
460
- if not isinstance(model_artifacts, list):
461
- raise ValueError('`model_artifacts` must be a list of dl.Artifact entities')
462
-
463
- # create payload for request
464
- payload = {
465
- 'packageId': package.id,
466
- 'name': model_name,
467
- 'projectId': project_id,
468
- 'datasetId': dataset_id,
469
- 'labels': labels,
470
- 'artifacts': [artifact.to_json(as_artifact=True) for artifact in model_artifacts],
471
- 'scope': scope,
472
- 'version': version,
473
- 'inputType': input_type,
474
- 'outputType': output_type,
475
- }
476
-
477
- if app is not None:
478
- if not isinstance(package, entities.Dpk):
479
- raise ValueError('package must be a Dpk entity')
480
- if app.dpk_name != package.name or app.dpk_version != package.version:
481
- raise ValueError('App and package must be the same')
482
- component_name = None
483
- compute_config = None
484
- for model in package.components.models:
485
- if model['name'] == model_name:
486
- component_name = model['name']
487
- compute_config = model.get('computeConfigs', None)
488
- break
489
- if component_name is None:
490
- raise ValueError('Model name not found in package')
491
- payload['app'] = {
492
- "id": app.id,
493
- "componentName": component_name,
494
- "dpkName": package.name,
495
- "dpkVersion": package.version
496
- }
497
- if compute_config is not None:
498
- payload['app']['computeConfig'] = compute_config
499
-
500
- if configuration is not None:
501
- payload['configuration'] = configuration
502
-
503
- if tags is not None:
504
- payload['tags'] = tags
505
-
506
- if description is not None:
507
- payload['description'] = description
508
-
509
- if status is not None:
510
- payload['status'] = status
511
-
512
- if train_filter or validation_filter or annotations_train_filter or annotations_validation_filter:
513
- metadata = Models._build_model_metadata(
514
- train_filter=train_filter,
515
- validation_filter=validation_filter,
516
- annotations_train_filter=annotations_train_filter,
517
- annotations_validation_filter=annotations_validation_filter
518
- )
519
- payload['metadata'] = metadata
520
-
521
- # request
522
- success, response = self._client_api.gen_request(req_type='post',
523
- path='/ml/models',
524
- json_req=payload)
525
-
526
- # exception handling
527
- if not success:
528
- raise exceptions.PlatformException(response)
529
-
530
- model = entities.Model.from_json(_json=response.json(),
531
- client_api=self._client_api,
532
- project=self._project,
533
- package=package)
534
-
535
- return model
536
-
537
- def clone(self,
538
- from_model: entities.Model,
539
- model_name: str,
540
- dataset: entities.Dataset = None,
541
- configuration: dict = None,
542
- status=None,
543
- scope=None,
544
- project_id: str = None,
545
- labels: list = None,
546
- description: str = None,
547
- tags: list = None,
548
- train_filter: entities.Filters = None,
549
- validation_filter: entities.Filters = None,
550
- annotations_train_filter: entities.Filters = None,
551
- annotations_validation_filter: entities.Filters = None,
552
- wait=True,
553
- ) -> entities.Model:
554
- """
555
- Clones and creates a new model out of existing one
556
-
557
- :param from_model: existing model to clone from
558
- :param str model_name: `str` new model name
559
-         :param dtlpy.entities.Dataset dataset: dataset object for the cloned model
560
- :param dict configuration: `dict` (optional) if passed replaces the current configuration
561
- :param str status: `str` (optional) set the new status
562
- :param str scope: `str` (optional) set the new scope. default is "project"
563
- :param str project_id: `str` specify the project id to create the new model on (if other than the source model)
564
-         :param list labels: `list` of `str` - labels of the model
565
-         :param str description: `str` description of the new model
566
-         :param list tags: `list` of `str` - tags of the model
567
- :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
568
- :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
569
- :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
570
- :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
571
- :param bool wait: `bool` wait for model to be ready
572
- :return: dl.Model which is a clone version of the existing model
573
- """
574
- from_json = {"name": model_name,
575
- "packageId": from_model.package_id,
576
- "configuration": from_model.configuration,
577
- "outputType": from_model.output_type,
578
- "inputType": from_model.input_type}
579
- if project_id is None:
580
- if dataset is not None:
581
- # take dataset project
582
- project_id = dataset.project.id
583
- else:
584
- # take model's project
585
- project_id = self.project.id
586
- from_json['projectId'] = project_id
587
- if dataset is not None:
588
- if labels is None:
589
- labels = list(dataset.labels_flat_dict.keys())
590
- from_json['datasetId'] = dataset.id
591
- if labels is not None:
592
- from_json['labels'] = labels
593
- # if there are new labels - pop the mapping from the original
594
- _ = from_json['configuration'].pop('id_to_label_map', None)
595
- _ = from_json['configuration'].pop('label_to_id_map', None)
596
- if configuration is not None:
597
- from_json['configuration'].update(configuration)
598
- if description is not None:
599
- from_json['description'] = description
600
- if tags is not None:
601
- from_json['tags'] = tags
602
- if scope is not None:
603
- from_json['scope'] = scope
604
- if status is not None:
605
- from_json['status'] = status
606
-
607
- metadata = Models._build_model_metadata(
608
- train_filter=train_filter,
609
- validation_filter=validation_filter,
610
- annotations_train_filter=annotations_train_filter,
611
- annotations_validation_filter=annotations_validation_filter,
612
- from_model=from_model
613
- )
614
- if metadata['system']['subsets'] or metadata['system']['annotationsSubsets']:
615
- from_json['metadata'] = metadata
616
- success, response = self._client_api.gen_request(req_type='post',
617
- path='/ml/models/{}/clone'.format(from_model.id),
618
- json_req=from_json)
619
- if not success:
620
- raise exceptions.PlatformException(response)
621
- new_model = entities.Model.from_json(_json=response.json(),
622
- client_api=self._client_api,
623
- project=self._project,
624
- package=from_model._package)
625
- if wait:
626
- new_model = self.wait_for_model_ready(model=new_model)
627
- return new_model
628
-
629
- def wait_for_model_ready(self, model: entities.Model):
630
- """
631
- Wait for model to be ready
632
-
633
- :param model: Model entity
634
- """
635
- sleep_time = MIN_INTERVAL
636
- while model.status == entities.ModelStatus.CLONING:
637
- model = self.get(model_id=model.id)
638
- time.sleep(sleep_time)
639
- sleep_time = min(sleep_time * BACKOFF_FACTOR, MAX_INTERVAL)
640
- time.sleep(sleep_time)
641
- return model
642
-
643
- @property
644
- def platform_url(self):
645
- return self._client_api._get_resource_url("projects/{}/models".format(self.project.id))
646
-
647
- def open_in_web(self, model=None, model_id=None):
648
- """
649
- Open the model in web platform
650
-
651
- :param model: model entity
652
- :param str model_id: model id
653
- """
654
- if model is not None:
655
- model.open_in_web()
656
- elif model_id is not None:
657
- self._client_api._open_in_web(url=self.platform_url + '/' + str(model_id) + '/main')
658
- else:
659
- self._client_api._open_in_web(url=self.platform_url)
660
-
661
- def delete(self, model: entities.Model = None, model_name=None, model_id=None):
662
- """
663
- Delete Model object
664
-
665
- :param model: Model entity to delete
666
- :param str model_name: delete by model name
667
- :param str model_id: delete by model id
668
- :return: True
669
- :rtype: bool
670
- """
671
- # get id and name
672
- if model_id is None:
673
- if model is not None:
674
- model_id = model.id
675
- elif model_name is not None:
676
- model = self.get(model_name=model_name)
677
- model_id = model.id
678
- else:
679
- raise exceptions.PlatformException(error='400',
680
- message='Must input at least one parameter to models.delete')
681
-
682
- # request
683
- success, response = self._client_api.gen_request(
684
- req_type="delete",
685
- path="/ml/models/{}".format(model_id)
686
- )
687
-
688
- # exception handling
689
- if not success:
690
- raise exceptions.PlatformException(response)
691
-
692
- # return results
693
- return True
694
-
695
- def update(self,
696
- model: entities.Model,
697
- system_metadata: bool = False,
698
- reload_services: bool = True
699
- ) -> entities.Model:
700
- """
701
- Update Model changes to platform
702
-
703
- :param model: Model entity
704
- :param bool system_metadata: True, if you want to change metadata system
705
- :param bool reload_services: True, if you want to update services
706
- :return: Model entity
707
- """
708
- # payload
709
- payload = model.to_json()
710
-
711
- # url
712
- url_path = '/ml/models/{}'.format(model.id)
713
- query_params = {}
714
- if system_metadata:
715
- query_params['system'] = 'true'
716
- if reload_services is not None:
717
- query_params['reloadServices'] = 'true' if reload_services else 'false'
718
-
719
- if query_params:
720
- url_path += '?' + urlencode(query_params)
721
-
722
- # request
723
- success, response = self._client_api.gen_request(req_type='patch',
724
- path=url_path,
725
- json_req=payload)
726
-
727
- # exception handling
728
- if not success:
729
- raise exceptions.PlatformException(response)
730
-
731
- # return entity
732
- return entities.Model.from_json(_json=response.json(),
733
- client_api=self._client_api,
734
- project=self._project,
735
- package=model._package)
736
-
737
- def train(self, model_id: str, service_config=None):
738
- """
739
- Train the model in the cloud. This will create a service and will run the adapter's train function as an execution
740
-
741
- :param model_id: id of the model to train
742
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
743
- :return:
744
- """
745
- payload = dict()
746
- if service_config is not None:
747
- payload['serviceConfig'] = service_config
748
- success, response = self._client_api.gen_request(req_type="post",
749
- path=f"/ml/models/{model_id}/train",
750
- json_req=payload)
751
- if not success:
752
- raise exceptions.PlatformException(response)
753
- return entities.Execution.from_json(_json=response.json(),
754
- client_api=self._client_api,
755
- project=self._project)
756
-
757
- def evaluate(self, model_id: str, dataset_id: str, filters: entities.Filters = None, service_config=None):
758
- """
759
-         Evaluate a model. Provide the data to evaluate the model on; you can also provide a specific config for the deployed service.
760
-
761
- :param str model_id: Model id to predict
762
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
763
- :param str dataset_id: ID of the dataset to evaluate
764
- :param entities.Filters filters: dl.Filter entity to run the predictions on
765
- :return:
766
- """
767
-
768
- payload = {'input': {'datasetId': dataset_id}}
769
- if service_config is not None:
770
- payload['config'] = {'serviceConfig': service_config}
771
- if filters is None:
772
- filters = entities.Filters()
773
- if filters is not None:
774
- payload['input']['datasetQuery'] = filters.prepare()
775
- success, response = self._client_api.gen_request(req_type="post",
776
- path=f"/ml/models/{model_id}/evaluate",
777
- json_req=payload)
778
- if not success:
779
- raise exceptions.PlatformException(response)
780
- return entities.Execution.from_json(_json=response.json(),
781
- client_api=self._client_api,
782
- project=self._project)
783
-
784
-     def predict(self, model, item_ids=None, dataset_id=None, filters=None):
785
- """
786
- Run model prediction with items
787
-
788
- :param model: dl.Model entity to run the prediction.
789
-         :param item_ids: a list of item ids to run the prediction on.
791
-         :param dataset_id: a dataset id to run the prediction on.
792
-         :param filters: Filters entity or dict to run the prediction on.
792
- :return:
793
- """
794
- if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
795
- # no services for model
796
-             raise ValueError("Model doesn't have any associated services. Need to deploy before predicting")
797
- if item_ids is None and dataset_id is None:
798
- raise ValueError("Need to provide either item_ids or dataset_id")
799
- if filters is not None and dataset_id is None:
800
- raise ValueError("If filters are provided, dataset_id is mandatory.")
801
- payload_input = {}
802
- if item_ids is not None:
803
- payload_input['itemIds'] = item_ids
804
- if dataset_id is not None:
805
- payload_input['datasetId'] = dataset_id
806
- if filters is not None:
807
- payload_input['datasetQuery'] = filters.prepare()['filter']
808
- payload = {'input': payload_input,
809
- 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
810
- logger.debug(f"generate post request to predict with payload {payload}")
811
- success, response = self._client_api.gen_request(req_type="post",
812
- path=f"/ml/models/{model.id}/predict",
813
- json_req=payload)
814
- if not success:
815
- logger.error(f"failed to make API request /ml/models/{model.id}/predict with payload {payload} response {response}")
816
- raise exceptions.PlatformException(response)
817
- return entities.Execution.from_json(_json=response.json(),
818
- client_api=self._client_api,
819
- project=self._project)
820
-
821
- def embed(self, model, item_ids=None, dataset_id=None, filters=None):
822
- """
823
- Run model embed with items
824
-
825
-         :param model: dl.Model entity to run the embedding.
826
-         :param item_ids: a list of item ids to run the embedding on.
827
-         :param dataset_id: a dataset id to run the embedding on.
828
-         :param filters: Filters entity or dict to run the embedding on.
829
- :return: Execution
830
- :rtype: dtlpy.entities.execution.Execution
831
- """
832
- if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
833
- # no services for model
834
-             raise ValueError("Model doesn't have any associated services. Need to deploy before embedding")
835
- if item_ids is None and dataset_id is None:
836
- raise ValueError("Need to provide either item_ids or dataset_id")
837
- if filters is not None and dataset_id is None:
838
- raise ValueError("If filters are provided, dataset_id is mandatory.")
839
- payload_input = {}
840
- if item_ids is not None:
841
- payload_input['itemIds'] = item_ids
842
- if dataset_id is not None:
843
- payload_input['datasetId'] = dataset_id
844
- if filters is not None:
845
- payload_input['datasetQuery'] = filters.prepare()['filter']
846
- payload = {'input': payload_input,
847
- 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
848
- logger.debug(f"generate post request to embed with payload {payload}")
849
- success, response = self._client_api.gen_request(req_type="post",
850
- path=f"/ml/models/{model.id}/embed",
851
- json_req=payload)
852
- if not success:
853
- logger.error(f"failed to make API request /ml/models/{model.id}/embed with payload {payload} response {response}")
854
- raise exceptions.PlatformException(response)
855
- return entities.Execution.from_json(_json=response.json(),
856
- client_api=self._client_api,
857
- project=self._project)
858
-
859
- def embed_datasets(self, model, dataset_ids, attach_trigger=False):
860
- """
861
- Run model embed with datasets
862
-
863
-         :param model: dl.Model entity to run the embedding.
864
-         :param dataset_ids: a list of dataset ids to run the embedding on.
865
- :param attach_trigger: bool, if True will activate the trigger
866
- :return:
867
- """
868
- if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
869
- # no services for model
870
-             raise ValueError("Model doesn't have any associated services. Need to deploy before embedding")
872
-         if dataset_ids is None:
873
-             raise ValueError("Need to provide dataset_ids")
873
- payload = {'datasetIds': dataset_ids,
874
- 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]},
875
- 'attachTrigger': attach_trigger
876
- }
877
-
878
- success, response = self._client_api.gen_request(req_type="post",
879
- path=f"/ml/models/{model.id}/embed/datasets",
880
- json_req=payload)
881
- if not success:
882
- raise exceptions.PlatformException(response)
883
- command = entities.Command.from_json(_json=response.json(),
884
- client_api=self._client_api)
885
- command = command.wait()
886
- return command
887
-
888
- def deploy(self, model_id: str, service_config=None) -> entities.Service:
889
- """
890
- Deploy a trained model. This will create a service that will execute predictions
891
-
892
- :param model_id: id of the model to deploy
893
- :param dict service_config : Service object as dict. Contains the spec of the default service to create.
894
- :return: dl.Service: the deployed service
895
- """
896
- payload = dict()
897
- if service_config is not None:
898
-             payload['serviceConfig'] = service_config.get("serviceConfig") or service_config
899
- success, response = self._client_api.gen_request(req_type="post",
900
- path=f"/ml/models/{model_id}/deploy",
901
- json_req=payload)
902
- if not success:
903
- raise exceptions.PlatformException(response)
904
-
905
- return entities.Service.from_json(_json=response.json(),
906
- client_api=self._client_api,
907
- project=self._project,
908
- package=self._package)
909
-
910
-
911
- class Metrics:
912
- def __init__(self, client_api, model=None, model_id=None):
913
- self._client_api = client_api
914
- self._model_id = model_id
915
- self._model = model
916
-
917
- @property
918
- def model(self):
919
- return self._model
920
-
921
- def create(self, samples, dataset_id) -> bool:
922
- """
923
- Add Samples for model analytics and metrics
924
-
925
- :param samples: list of dl.PlotSample - must contain: model_id, figure, legend, x, y
926
-         :param dataset_id: dataset id to save the samples on
928
- :return: bool: True if success
929
- """
930
- if not isinstance(samples, list):
931
- samples = [samples]
932
-
933
- payload = list()
934
- for sample in samples:
935
- _json = sample.to_json()
936
- _json['modelId'] = self.model.id
937
- _json['datasetId'] = dataset_id
938
- payload.append(_json)
939
- # request
940
- success, response = self._client_api.gen_request(req_type='post',
941
- path='/ml/metrics/publish',
942
- json_req=payload)
943
-
944
- # exception handling
945
- if not success:
946
- raise exceptions.PlatformException(response)
947
-
948
- # return entity
949
- return True
950
-
951
- def _list(self, filters: entities.Filters):
952
- # request
953
- success, response = self._client_api.gen_request(req_type='POST',
954
- path='/ml/metrics/query',
955
- json_req=filters.prepare())
956
- if not success:
957
- raise exceptions.PlatformException(response)
958
- return response.json()
959
-
960
- def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Model]:
961
- jobs = [None for _ in range(len(response_items))]
962
- pool = self._client_api.thread_pools(pool_name='entity.create')
963
-
964
-         # build the samples list
965
- for i_service, sample in enumerate(response_items):
966
- jobs[i_service] = pool.submit(entities.PlotSample,
967
- **{'x': sample.get('data', dict()).get('x', None),
968
- 'y': sample.get('data', dict()).get('y', None),
969
- 'legend': sample.get('legend', ''),
970
- 'figure': sample.get('figure', '')})
971
-
972
- # get all results
973
- results = [j.result() for j in jobs]
974
- # return good jobs
975
- return miscellaneous.List(results)
976
-
977
- def list(self, filters=None) -> entities.PagedEntities:
978
- """
979
- List Samples for model analytics and metrics
980
-
981
- :param filters: dl.Filter query entity
982
- """
983
- if filters is None:
984
- filters = entities.Filters(resource=entities.FiltersResource.METRICS)
985
- if not isinstance(filters, entities.Filters):
986
- raise exceptions.PlatformException(error='400',
987
- message='Unknown filters type: {!r}'.format(type(filters)))
988
- if filters.resource != entities.FiltersResource.METRICS:
989
- raise exceptions.PlatformException(
990
- error='400',
991
-                 message='Filters resource must be FiltersResource.METRICS. Got: {!r}'.format(filters.resource))
992
- if self._model is not None:
993
- filters.add(field='modelId', values=self._model.id)
994
- paged = entities.PagedEntities(items_repository=self,
995
- filters=filters,
996
- page_offset=filters.page,
997
- page_size=filters.page_size,
998
- client_api=self._client_api)
999
- paged.get_page()
1000
- return paged
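
For orientation, below is a minimal usage sketch of the repository methods shown above (clone() with item subset filters, then train()). It assumes the dtlpy SDK is installed and a logged-in session; the project, dataset, and model names are placeholders, not values taken from this release.

import dtlpy as dl

project = dl.projects.get(project_name='my-project')        # placeholder project name
dataset = project.datasets.get(dataset_name='my-dataset')   # placeholder dataset name
base_model = project.models.get(model_name='base-model')    # placeholder model name

# clone() accepts train/validation item filters; the directory filter mirrors
# the example given in the delete_subset() docstring above
cloned = project.models.clone(
    from_model=base_model,
    model_name='my-cloned-model',
    dataset=dataset,
    train_filter=dl.Filters(field='dir', values='/train'),
    validation_filter=dl.Filters(field='dir', values='/validation'),
)

# train() creates a service and runs the adapter's train function as an execution
execution = project.models.train(model_id=cloned.id)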
1
+ import time
2
+ from typing import List
3
+ import logging
4
+ from urllib.parse import urlencode
5
+
6
+ from .. import entities, repositories, exceptions, miscellaneous
7
+ from ..services.api_client import ApiClient
8
+
9
+ logger = logging.getLogger(name='dtlpy')
10
+
11
+ MIN_INTERVAL = 1
12
+ BACKOFF_FACTOR = 1.2
13
+ MAX_INTERVAL = 12
14
+
15
+
16
+ class Models:
17
+ """
18
+ Models Repository
19
+ """
20
+ @staticmethod
21
+ def _filter_to_dict(filter_obj):
22
+ """Convert Filters object to dict, or return as-is if already dict/None"""
23
+ if filter_obj is not None:
24
+ filter_obj = filter_obj.prepare() if isinstance(filter_obj, entities.Filters) else filter_obj
25
+ return filter_obj
26
+
27
+ @staticmethod
28
+ def _get_filter_from_model(model, subset_type, resource_type):
29
+ """Extract filter dict from model metadata"""
30
+ filter_dict = None
31
+ if model is not None:
32
+ if resource_type == entities.FiltersResource.ITEM:
33
+ filter_dict = model.metadata.get('system', {}).get('subsets', {}).get(subset_type.value)
34
+ else: # ANNOTATION
35
+ filter_dict = model.metadata.get('system', {}).get('annotationsSubsets', {}).get(subset_type.value)
36
+ return filter_dict
37
+
38
+ @staticmethod
39
+ def _build_model_metadata(
40
+ train_filter: entities.Filters = None,
41
+ validation_filter: entities.Filters = None,
42
+ annotations_train_filter: entities.Filters = None,
43
+ annotations_validation_filter: entities.Filters = None,
44
+ from_model: entities.Model = None
45
+ ) -> dict:
46
+ """
47
+ Build model metadata with filters, optionally inheriting from existing model.
48
+
49
+ :param train_filter: Training data filter (Filters object or dict)
50
+ :param validation_filter: Validation data filter (Filters object or dict)
51
+ :param annotations_train_filter: Training annotations filter (Filters object or dict)
52
+ :param annotations_validation_filter: Validation annotations filter (Filters object or dict)
53
+ :param from_model: Source model to inherit filters from (if not provided explicitly)
54
+ :return: Metadata dictionary with filters
55
+ """
56
+ metadata = {'system': {'subsets': {}, 'annotationsSubsets': {}}}
57
+
58
+ # Handle item filters
59
+ train_filter_dict = Models._filter_to_dict(train_filter)
60
+ if train_filter_dict is None and from_model is not None:
61
+ train_filter_dict = Models._get_filter_from_model(
62
+ model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ITEM)
63
+
64
+ validation_filter_dict = Models._filter_to_dict(validation_filter)
65
+ if validation_filter_dict is None and from_model is not None:
66
+ validation_filter_dict = Models._get_filter_from_model(
67
+ model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ITEM)
68
+
69
+ # Handle annotation filters
70
+ annotations_train_filter_dict = Models._filter_to_dict(annotations_train_filter)
71
+ if annotations_train_filter_dict is None and from_model is not None:
72
+ annotations_train_filter_dict = Models._get_filter_from_model(
73
+ model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ANNOTATION)
74
+
75
+ annotations_validation_filter_dict = Models._filter_to_dict(annotations_validation_filter)
76
+ if annotations_validation_filter_dict is None and from_model is not None:
77
+ annotations_validation_filter_dict = Models._get_filter_from_model(
78
+ model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ANNOTATION)
79
+
80
+ # Set filters in metadata
81
+ if train_filter_dict is not None:
82
+ metadata['system']['subsets']['train'] = train_filter_dict
83
+ if validation_filter_dict is not None:
84
+ metadata['system']['subsets']['validation'] = validation_filter_dict
85
+ if annotations_train_filter_dict is not None:
86
+ metadata['system']['annotationsSubsets']['train'] = annotations_train_filter_dict
87
+ if annotations_validation_filter_dict is not None:
88
+ metadata['system']['annotationsSubsets']['validation'] = annotations_validation_filter_dict
89
+
90
+ return metadata
91
+
92
+ def __init__(self,
93
+ client_api: ApiClient,
94
+ package: entities.Package = None,
95
+ project: entities.Project = None,
96
+ project_id: str = None):
97
+ self._client_api = client_api
98
+ self._project = project
99
+ self._package = package
100
+ self._project_id = project_id
101
+
102
+ if self._project is not None:
103
+ self._project_id = self._project.id
104
+
105
+ ############
106
+ # entities #
107
+ ############
108
+ @property
109
+ def project(self) -> entities.Project:
110
+ if self._project is None:
111
+ if self._project_id is not None:
112
+ projects = repositories.Projects(client_api=self._client_api)
113
+ self._project = projects.get(project_id=self._project_id)
114
+ if self._project is None:
115
+ if self._package is not None:
116
+ if self._package._project is not None:
117
+ self._project = self._package._project
118
+ if self._project is None:
119
+ raise exceptions.PlatformException(
120
+ error='2001',
121
+ message='Missing "project". need to set a Project entity or use project.models repository')
122
+ assert isinstance(self._project, entities.Project)
123
+ return self._project
124
+
125
+ @project.setter
126
+ def project(self, project: entities.Project):
127
+ if not isinstance(project, entities.Project):
128
+ raise ValueError('Must input a valid Project entity')
129
+ self._project = project
130
+
131
+ @property
132
+ def package(self) -> entities.Package:
133
+ if self._package is None:
134
+ raise exceptions.PlatformException(
135
+ error='2001',
136
+ message='Cannot perform action WITHOUT Package entity in {} repository.'.format(
137
+ self.__class__.__name__) +
138
+ ' Please use package.models or set a model')
139
+ assert isinstance(self._package, entities.Package)
140
+ return self._package
141
+
142
+ ###########
143
+ # methods #
144
+ ###########
145
+ def get(self, model_name=None, model_id=None) -> entities.Model:
146
+ """
147
+ Get model object
148
+ :param model_name: optional - search by model name
149
+ :param model_id: optional - search by model id
150
+ :return: dl.Model object
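+
+ **Example**:
+
+ A minimal usage sketch; ``project`` is a placeholder dl.Project that already contains the model.
+
+ .. code-block:: python
+
+ model = project.models.get(model_name='my-model-name')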
151
+ """
152
+
153
+ if model_id is not None:
154
+ success, response = self._client_api.gen_request(req_type="get",
155
+ path="/ml/models/{}".format(model_id))
156
+ if not success:
157
+ raise exceptions.PlatformException(response)
158
+ model = entities.Model.from_json(client_api=self._client_api,
159
+ _json=response.json(),
160
+ project=self._project,
161
+ package=self._package)
162
+ # verify input model name is same as the given id
163
+ if model_name is not None and model.name != model_name:
164
+ logger.warning(
165
+ "Mismatch found in models.get: model_name is different then model.name:"
166
+ " {!r} != {!r}".format(
167
+ model_name,
168
+ model.name))
169
+ elif model_name is not None:
170
+
171
+ filters = entities.Filters(
172
+ resource=entities.FiltersResource.MODEL,
173
+ field='name',
174
+ values=model_name
175
+ )
176
+
177
+ project_id = None
178
+
179
+ if self._project is not None:
180
+ project_id = self._project.id
181
+ elif self._project_id is not None:
182
+ project_id = self._project_id
183
+
184
+ if project_id is not None:
185
+ filters.add(field='projectId', values=project_id)
186
+
187
+ if self._package is not None:
188
+ filters.add(field='packageId', values=self._package.id)
189
+
190
+ models = self.list(filters=filters)
191
+
192
+ if models.items_count == 0:
193
+ raise exceptions.PlatformException(
194
+ error='404',
195
+ message='Model not found. Name: {}'.format(model_name))
196
+ elif models.items_count > 1:
197
+ raise exceptions.PlatformException(
198
+ error='400',
199
+ message='More than one Model found by the name of: {}. Try "get" by id or "list()".'.format(
200
+ model_name))
201
+ model = models.items[0]
202
+ else:
203
+ raise exceptions.PlatformException(
204
+ error='400',
205
+ message='No checked-out Model was found; must check out a model or provide an identifier')
206
+
207
+ return model
208
+
209
+ def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Model]:
210
+ jobs = [None for _ in range(len(response_items))]
211
+ pool = self._client_api.thread_pools(pool_name='entity.create')
212
+
213
+ # build Model entities in parallel
214
+ for i_service, service in enumerate(response_items):
215
+ jobs[i_service] = pool.submit(entities.Model._protected_from_json,
216
+ **{'client_api': self._client_api,
217
+ '_json': service,
218
+ 'package': self._package,
219
+ 'project': self._project})
220
+
221
+ # get all results
222
+ results = [j.result() for j in jobs]
223
+ # log errors
224
+ _ = [logger.warning(r[1]) for r in results if r[0] is False]
225
+ # return good jobs
226
+ return miscellaneous.List([r[1] for r in results if r[0] is True])
227
+
228
+ def _list(self, filters: entities.Filters):
229
+ # request
230
+ success, response = self._client_api.gen_request(req_type='POST',
231
+ path='/ml/models/query',
232
+ json_req=filters.prepare())
233
+ if not success:
234
+ raise exceptions.PlatformException(response)
235
+ return response.json()
236
+
237
+ def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
238
+ """
239
+ List project models
240
+
241
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
242
+ :return: Paged entity
243
+ :rtype: dtlpy.entities.paged_entities.PagedEntities
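+
+ **Example**:
+
+ A minimal usage sketch, assuming the usual ``import dtlpy as dl``; the name filter is illustrative and ``pages.all()`` iterates over every page.
+
+ .. code-block:: python
+
+ filters = dl.Filters(resource=dl.FiltersResource.MODEL, field='name', values='my-model-name')
+ pages = project.models.list(filters=filters)
+ for model in pages.all():
+     print(model.name)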
244
+ """
245
+ # default filters
246
+ if filters is None:
247
+ filters = entities.Filters(resource=entities.FiltersResource.MODEL)
248
+ if self._project is not None:
249
+ filters.add(field='projectId', values=self._project.id)
250
+ if self._package is not None:
251
+ filters.add(field='packageId', values=self._package.id)
252
+
253
+ # assert type filters
254
+ if not isinstance(filters, entities.Filters):
255
+ raise exceptions.PlatformException(error='400',
256
+ message='Unknown filters type: {!r}'.format(type(filters)))
257
+
258
+ if filters.resource != entities.FiltersResource.MODEL:
259
+ raise exceptions.PlatformException(
260
+ error='400',
261
+ message='Filters resource must be FiltersResource.MODEL. Got: {!r}'.format(filters.resource))
262
+
263
+ paged = entities.PagedEntities(items_repository=self,
264
+ filters=filters,
265
+ page_offset=filters.page,
266
+ page_size=filters.page_size,
267
+ client_api=self._client_api)
268
+ paged.get_page()
269
+ return paged
270
+
271
+ def _set_model_filter(self,
272
+ metadata: dict,
273
+ train_filter: entities.Filters = None,
274
+ validation_filter: entities.Filters = None):
275
+ if metadata is None:
276
+ metadata = {}
277
+ if 'system' not in metadata:
278
+ metadata['system'] = {}
279
+ if 'subsets' not in metadata['system']:
280
+ metadata['system']['subsets'] = {}
281
+ if train_filter is not None:
282
+ metadata['system']['subsets']['train'] = train_filter.prepare() if isinstance(train_filter,
283
+ entities.Filters) else train_filter
284
+ if validation_filter is not None:
285
+ metadata['system']['subsets']['validation'] = validation_filter.prepare() if isinstance(validation_filter,
286
+ entities.Filters) else validation_filter
287
+ return metadata
288
+
289
+ @staticmethod
290
+ def add_subset(
291
+ model: entities.Model,
292
+ subset_name: str,
293
+ subset_filter=None,
294
+ subset_annotation_filter=None,
295
+ ):
296
+ """
297
+ Adds a subset for a model, specifying a subset of the model's dataset that could be used for training or
298
+ validation. Optionally also adds an annotations subset.
299
+
300
+ :param dtlpy.entities.Model model: the model to which the subset should be added
301
+ :param str subset_name: the name of the subset
302
+ :param subset_filter: filtering for items subset. Can be `entities.Filters`, `dict`, or `None`
303
+ :param subset_annotation_filter: optional filtering for annotations subset. Can be `entities.Filters`, `dict`, or `None`
304
+
305
+ Behavior:
306
+ - If both filters are None, no metadata is added/changed.
307
+ - If a filter is a dict, it is used as-is (no prepare()).
308
+ - If a filter is `entities.Filters`, `.prepare()` is used.
309
+ - Only non-None filters are added.
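+
+ **Example**:
+
+ A minimal usage sketch, assuming the usual ``import dtlpy as dl``; ``model_entity`` is a placeholder dl.Model whose dataset has a '/validation' directory.
+
+ .. code-block:: python
+
+ project.models.add_subset(model=model_entity, subset_name='validation', subset_filter=dl.Filters(field='dir', values='/validation'))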
310
+ """
311
+ if subset_filter is None and subset_annotation_filter is None:
312
+ return
313
+
314
+ subset_filter_dict = subset_filter.prepare() if isinstance(subset_filter, entities.Filters) else subset_filter
315
+ subset_annotation_filter_dict = (
316
+ subset_annotation_filter.prepare()
317
+ if isinstance(subset_annotation_filter, entities.Filters)
318
+ else subset_annotation_filter
319
+ )
320
+
321
+ # Initialize containers only if needed
322
+ if 'system' not in model.metadata:
323
+ model.metadata['system'] = dict()
324
+ if subset_filter_dict is not None:
325
+ if 'subsets' not in model.metadata['system']:
326
+ model.metadata['system']['subsets'] = dict()
327
+ model.metadata['system']['subsets'][subset_name] = subset_filter_dict
328
+
329
+ if subset_annotation_filter_dict is not None:
330
+ if 'annotationsSubsets' not in model.metadata['system']:
331
+ model.metadata['system']['annotationsSubsets'] = dict()
332
+ model.metadata['system']['annotationsSubsets'][subset_name] = subset_annotation_filter_dict
333
+
334
+ model.update(system_metadata=True)
335
+
336
+ @staticmethod
337
+ def delete_subset(model: entities.Model, subset_name: str):
338
+ """
339
+ Removes a subset from a model's metadata (both subsets and annotationsSubsets).
340
+
341
+ :param dtlpy.entities.Model model: the model from which the subset should be removed
342
+ :param str subset_name: the name of the subset
343
+
344
+ **Example**
345
+
346
+ .. code-block:: python
347
+
348
+ project.models.add_subset(model=model_entity, subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
349
+ model_entity.metadata['system']['subsets']
350
+ {'train': {'filter': {'$and': [{'dir': '/train'}]}, 'page': 0, 'pageSize': 1000, 'resource': 'items'}}
351
+ project.models.delete_subset(model=model_entity, subset_name='train')
352
+ model_entity.metadata['system']['subsets']
353
+ {}
354
+
355
+ """
356
+ # If the subset is missing from the item subsets, log an error; otherwise remove it
357
+ if model.metadata.get("system", dict()).get("subsets", dict()).get(subset_name) is None:
358
+ logger.error(f"Model system metadata incomplete, could not delete subset {subset_name}.")
359
+ else:
360
+ _ = model.metadata['system']['subsets'].pop(subset_name)
361
+
362
+ # Remove from annotationsSubsets if it exists
363
+ if model.metadata.get("system", dict()).get("annotationsSubsets", dict()).get(subset_name) is not None:
364
+ _ = model.metadata['system']['annotationsSubsets'].pop(subset_name)
365
+
366
+ model.update(system_metadata=True)
367
+
368
+ def create(
369
+ self,
370
+ model_name: str,
371
+ dataset_id: str = None,
372
+ labels: list = None,
373
+ ontology_id: str = None,
374
+ description: str = None,
375
+ model_artifacts: List[entities.Artifact] = None,
376
+ project_id=None,
377
+ tags: List[str] = None,
378
+ package: entities.Package = None,
379
+ configuration: dict = None,
380
+ status: str = None,
381
+ scope: entities.EntityScopeLevel = entities.EntityScopeLevel.PROJECT,
382
+ version: str = '1.0.0',
383
+ input_type=None,
384
+ output_type=None,
385
+ train_filter: entities.Filters = None,
386
+ validation_filter: entities.Filters = None,
387
+ annotations_train_filter: entities.Filters = None,
388
+ annotations_validation_filter: entities.Filters = None,
389
+ app: entities.App = None
390
+ ) -> entities.Model:
391
+ """
392
+ Create a Model entity
393
+
394
+ :param str model_name: name of the model
395
+ :param str dataset_id: dataset id
396
+ :param list labels: list of labels from ontology (must match the ontology id); can be a subset
397
+ :param str ontology_id: ontology to connect to the model
398
+ :param str description: description
399
+ :param model_artifacts: optional list of dl.Artifact. Can be ItemArtifact, LocalArtifact or LinkArtifact
400
+ :param str project_id: project that owns the model
401
+ :param list tags: list of string tags
402
+ :param package: optional - Package object
403
+ :param dict configuration: optional - model configuration - dict
404
+ :param str status: optional - the model status (one of the entities.ModelStatus values)
405
+ :param str scope: the scope level of the model dl.EntityScopeLevel
406
+ :param str version: version of the model
407
+ :param str input_type: the file type the model expect as input (image, video, txt, etc)
408
+ :param str output_type: dl.AnnotationType - the type of annotations the model produces (class, box, segment, text, etc.)
409
+ :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
410
+ :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
411
+ :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
412
+ :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
413
+ :param dtlpy.entities.App app: App entity to connect the model to
414
+ :return: Model Entity
415
+
416
+ **Example**:
417
+
418
+ .. code-block:: python
419
+
420
+ project.models.create(model_name='model_name', dataset_id='dataset_id', labels=['label1', 'label2'], train_filter={'filter': {'$and': [{'dir': '/10K short videos'}]}, 'page': 0, 'pageSize': 1000, 'resource': 'items'})
421
+
422
+ """
423
+
424
+ if ontology_id is not None:
425
+ # take labels from ontology
426
+ ontologies = repositories.Ontologies(client_api=self._client_api)
427
+ labels = [label.tag for label in ontologies.get(ontology_id=ontology_id).labels]
428
+
429
+ if labels is None:
430
+ # labels are optional; default to an empty list
431
+ labels = list()
432
+
433
+ if input_type is None:
434
+ input_type = 'image'
435
+
436
+ if output_type is None:
437
+ output_type = entities.AnnotationType.CLASSIFICATION
438
+
439
+ if package is None and self._package is None:
440
+ raise exceptions.PlatformException('Must provide a Package or create from package.models')
441
+ elif package is None:
442
+ package = self._package
443
+
444
+ # TODO need to remove the entire project id user interface - need to take it from dataset id (in BE)
445
+ if project_id is None:
446
+ if self._project is None:
447
+ raise exceptions.PlatformException('Please provide project_id')
448
+ project_id = self._project.id
449
+ else:
450
+ if project_id != self._project_id:
451
+ if (isinstance(package, entities.Package) and not package.is_global) or \
452
+ (isinstance(package, entities.Dpk) and package.scope != 'public'):
453
+ logger.warning(
454
+ "Note! you are specified project_id {!r} which is different from repository context: {!r}".format(
455
+ project_id, self._project_id))
456
+
457
+ if model_artifacts is None:
458
+ model_artifacts = []
459
+
460
+ if not isinstance(model_artifacts, list):
461
+ raise ValueError('`model_artifacts` must be a list of dl.Artifact entities')
462
+
463
+ # create payload for request
464
+ payload = {
465
+ 'packageId': package.id,
466
+ 'name': model_name,
467
+ 'projectId': project_id,
468
+ 'datasetId': dataset_id,
469
+ 'labels': labels,
470
+ 'artifacts': [artifact.to_json(as_artifact=True) for artifact in model_artifacts],
471
+ 'scope': scope,
472
+ 'version': version,
473
+ 'inputType': input_type,
474
+ 'outputType': output_type,
475
+ }
476
+
477
+ if app is not None:
478
+ if not isinstance(package, entities.Dpk):
479
+ raise ValueError('package must be a Dpk entity')
480
+ if app.dpk_name != package.name or app.dpk_version != package.version:
481
+ raise ValueError('App and package must reference the same DPK name and version')
482
+ component_name = None
483
+ compute_config = None
484
+ for model in package.components.models:
485
+ if model['name'] == model_name:
486
+ component_name = model['name']
487
+ compute_config = model.get('computeConfigs', None)
488
+ break
489
+ if component_name is None:
490
+ raise ValueError('Model name not found in package')
491
+ payload['app'] = {
492
+ "id": app.id,
493
+ "componentName": component_name,
494
+ "dpkName": package.name,
495
+ "dpkVersion": package.version
496
+ }
497
+ if compute_config is not None:
498
+ payload['app']['computeConfig'] = compute_config
499
+
500
+ if configuration is not None:
501
+ payload['configuration'] = configuration
502
+
503
+ if tags is not None:
504
+ payload['tags'] = tags
505
+
506
+ if description is not None:
507
+ payload['description'] = description
508
+
509
+ if status is not None:
510
+ payload['status'] = status
511
+
512
+ if train_filter or validation_filter or annotations_train_filter or annotations_validation_filter:
513
+ metadata = Models._build_model_metadata(
514
+ train_filter=train_filter,
515
+ validation_filter=validation_filter,
516
+ annotations_train_filter=annotations_train_filter,
517
+ annotations_validation_filter=annotations_validation_filter
518
+ )
519
+ payload['metadata'] = metadata
520
+
521
+ # request
522
+ success, response = self._client_api.gen_request(req_type='post',
523
+ path='/ml/models',
524
+ json_req=payload)
525
+
526
+ # exception handling
527
+ if not success:
528
+ raise exceptions.PlatformException(response)
529
+
530
+ model = entities.Model.from_json(_json=response.json(),
531
+ client_api=self._client_api,
532
+ project=self._project,
533
+ package=package)
534
+
535
+ return model
536
+
537
+ def clone(self,
538
+ from_model: entities.Model,
539
+ model_name: str,
540
+ dataset: entities.Dataset = None,
541
+ configuration: dict = None,
542
+ status=None,
543
+ scope=None,
544
+ project_id: str = None,
545
+ labels: list = None,
546
+ description: str = None,
547
+ tags: list = None,
548
+ train_filter: entities.Filters = None,
549
+ validation_filter: entities.Filters = None,
550
+ annotations_train_filter: entities.Filters = None,
551
+ annotations_validation_filter: entities.Filters = None,
552
+ wait=True,
553
+ ) -> entities.Model:
554
+ """
555
+ Clones and creates a new model out of existing one
556
+
557
+ :param from_model: existing model to clone from
558
+ :param str model_name: `str` new model name
559
+ :param dtlpy.entities.Dataset dataset: Dataset object for the cloned model
560
+ :param dict configuration: `dict` (optional) if passed replaces the current configuration
561
+ :param str status: `str` (optional) set the new status
562
+ :param str scope: `str` (optional) set the new scope. default is "project"
563
+ :param str project_id: `str` specify the project id to create the new model on (if other than the source model)
564
+ :param list labels: `list` of `str` - labels of the model
565
+ :param str description: `str` description of the new model
566
+ :param list tags: `list` of `str` - tags of the model
567
+ :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
568
+ :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
569
+ :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
570
+ :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
571
+ :param bool wait: `bool` wait for model to be ready
572
+ :return: dl.Model which is a cloned version of the existing model
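+
+ **Example**:
+
+ A minimal usage sketch; ``model_entity`` and ``dataset_entity`` are placeholder entities assumed to exist in the project.
+
+ .. code-block:: python
+
+ cloned_model = project.models.clone(from_model=model_entity, model_name='my-model-clone', dataset=dataset_entity)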
573
+ """
574
+ from_json = {"name": model_name,
575
+ "packageId": from_model.package_id,
576
+ "configuration": from_model.configuration,
577
+ "outputType": from_model.output_type,
578
+ "inputType": from_model.input_type}
579
+ if project_id is None:
580
+ if dataset is not None:
581
+ # take dataset project
582
+ project_id = dataset.project.id
583
+ else:
584
+ # take model's project
585
+ project_id = self.project.id
586
+ from_json['projectId'] = project_id
587
+ if dataset is not None:
588
+ if labels is None:
589
+ labels = list(dataset.labels_flat_dict.keys())
590
+ from_json['datasetId'] = dataset.id
591
+ if labels is not None:
592
+ from_json['labels'] = labels
593
+ # if there are new labels - pop the mapping from the original
594
+ _ = from_json['configuration'].pop('id_to_label_map', None)
595
+ _ = from_json['configuration'].pop('label_to_id_map', None)
596
+ if configuration is not None:
597
+ from_json['configuration'].update(configuration)
598
+ if description is not None:
599
+ from_json['description'] = description
600
+ if tags is not None:
601
+ from_json['tags'] = tags
602
+ if scope is not None:
603
+ from_json['scope'] = scope
604
+ if status is not None:
605
+ from_json['status'] = status
606
+
607
+ metadata = Models._build_model_metadata(
608
+ train_filter=train_filter,
609
+ validation_filter=validation_filter,
610
+ annotations_train_filter=annotations_train_filter,
611
+ annotations_validation_filter=annotations_validation_filter,
612
+ from_model=from_model
613
+ )
614
+ if metadata['system']['subsets'] or metadata['system']['annotationsSubsets']:
615
+ from_json['metadata'] = metadata
616
+ success, response = self._client_api.gen_request(req_type='post',
617
+ path='/ml/models/{}/clone'.format(from_model.id),
618
+ json_req=from_json)
619
+ if not success:
620
+ raise exceptions.PlatformException(response)
621
+ new_model = entities.Model.from_json(_json=response.json(),
622
+ client_api=self._client_api,
623
+ project=self._project,
624
+ package=from_model._package)
625
+ if wait:
626
+ new_model = self.wait_for_model_ready(model=new_model)
627
+ return new_model
628
+
629
+ def wait_for_model_ready(self, model: entities.Model):
630
+ """
631
+ Wait for model to be ready
632
+
633
+ :param model: Model entity
634
+ """
635
+ sleep_time = MIN_INTERVAL
636
+ while model.status == entities.ModelStatus.CLONING:
637
+ model = self.get(model_id=model.id)
638
+ time.sleep(sleep_time)
639
+ sleep_time = min(sleep_time * BACKOFF_FACTOR, MAX_INTERVAL)
640
+ time.sleep(sleep_time)
641
+ return model
642
+
643
+ @property
644
+ def platform_url(self):
645
+ return self._client_api._get_resource_url("projects/{}/models".format(self.project.id))
646
+
647
+ def open_in_web(self, model=None, model_id=None):
648
+ """
649
+ Open the model in web platform
650
+
651
+ :param model: model entity
652
+ :param str model_id: model id
653
+ """
654
+ if model is not None:
655
+ model.open_in_web()
656
+ elif model_id is not None:
657
+ self._client_api._open_in_web(url=self.platform_url + '/' + str(model_id) + '/main')
658
+ else:
659
+ self._client_api._open_in_web(url=self.platform_url)
660
+
661
+ def delete(self, model: entities.Model = None, model_name=None, model_id=None):
662
+ """
663
+ Delete Model object
664
+
665
+ :param model: Model entity to delete
666
+ :param str model_name: delete by model name
667
+ :param str model_id: delete by model id
668
+ :return: True
669
+ :rtype: bool
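+
+ **Example**:
+
+ A minimal usage sketch; deleting by id is shown, deleting by name or by entity works the same way. ``model_entity`` is a placeholder dl.Model.
+
+ .. code-block:: python
+
+ project.models.delete(model_id=model_entity.id)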
670
+ """
671
+ # get id and name
672
+ if model_id is None:
673
+ if model is not None:
674
+ model_id = model.id
675
+ elif model_name is not None:
676
+ model = self.get(model_name=model_name)
677
+ model_id = model.id
678
+ else:
679
+ raise exceptions.PlatformException(error='400',
680
+ message='Must input at least one parameter to models.delete')
681
+
682
+ # request
683
+ success, response = self._client_api.gen_request(
684
+ req_type="delete",
685
+ path="/ml/models/{}".format(model_id)
686
+ )
687
+
688
+ # exception handling
689
+ if not success:
690
+ raise exceptions.PlatformException(response)
691
+
692
+ # return results
693
+ return True
694
+
695
+ def update(self,
696
+ model: entities.Model,
697
+ system_metadata: bool = False,
698
+ reload_services: bool = True
699
+ ) -> entities.Model:
700
+ """
701
+ Update Model changes to platform
702
+
703
+ :param model: Model entity
704
+ :param bool system_metadata: True, if you want to change metadata system
705
+ :param bool reload_services: True, if you want to update services
706
+ :return: Model entity
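+
+ **Example**:
+
+ A minimal usage sketch; the changed field is illustrative and ``model_entity`` is a placeholder dl.Model.
+
+ .. code-block:: python
+
+ model_entity.description = 'updated description'
+ model_entity = project.models.update(model=model_entity)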
707
+ """
708
+ # payload
709
+ payload = model.to_json()
710
+
711
+ # url
712
+ url_path = '/ml/models/{}'.format(model.id)
713
+ query_params = {}
714
+ if system_metadata:
715
+ query_params['system'] = 'true'
716
+ if reload_services is not None:
717
+ query_params['reloadServices'] = 'true' if reload_services else 'false'
718
+
719
+ if query_params:
720
+ url_path += '?' + urlencode(query_params)
721
+
722
+ # request
723
+ success, response = self._client_api.gen_request(req_type='patch',
724
+ path=url_path,
725
+ json_req=payload)
726
+
727
+ # exception handling
728
+ if not success:
729
+ raise exceptions.PlatformException(response)
730
+
731
+ # return entity
732
+ return entities.Model.from_json(_json=response.json(),
733
+ client_api=self._client_api,
734
+ project=self._project,
735
+ package=model._package)
736
+
737
+ def train(self, model_id: str, service_config=None):
738
+ """
739
+ Train the model in the cloud. This will create a service and will run the adapter's train function as an execution
740
+
741
+ :param model_id: id of the model to train
742
+ :param dict service_config: Service object as dict. Contains the spec of the default service to create.
743
+ :return:
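+
+ **Example**:
+
+ A minimal usage sketch; ``model_entity`` is a placeholder dl.Model, and waiting on the returned execution is optional.
+
+ .. code-block:: python
+
+ execution = project.models.train(model_id=model_entity.id)
+ execution = execution.wait()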
744
+ """
745
+ payload = dict()
746
+ if service_config is not None:
747
+ payload['serviceConfig'] = service_config
748
+ success, response = self._client_api.gen_request(req_type="post",
749
+ path=f"/ml/models/{model_id}/train",
750
+ json_req=payload)
751
+ if not success:
752
+ raise exceptions.PlatformException(response)
753
+ return entities.Execution.from_json(_json=response.json(),
754
+ client_api=self._client_api,
755
+ project=self._project)
756
+
757
+ def evaluate(self, model_id: str, dataset_id: str, filters: entities.Filters = None, service_config=None):
758
+ """
759
+ Evaluate Model. Provide data to evaluate the model on; you can also provide a specific config for the deployed service.
760
+
761
+ :param str model_id: Model id to predict
762
+ :param dict service_config: Service object as dict. Contains the spec of the default service to create.
763
+ :param str dataset_id: ID of the dataset to evaluate
764
+ :param entities.Filters filters: dl.Filters entity to select the dataset items to evaluate on
765
+ :return:
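+
+ **Example**:
+
+ A minimal usage sketch, assuming the usual ``import dtlpy as dl``; the '/test' directory filter is illustrative, and ``model_entity`` / ``dataset_entity`` are placeholder entities.
+
+ .. code-block:: python
+
+ execution = project.models.evaluate(model_id=model_entity.id, dataset_id=dataset_entity.id, filters=dl.Filters(field='dir', values='/test'))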
766
+ """
767
+
768
+ payload = {'input': {'datasetId': dataset_id}}
769
+ if service_config is not None:
770
+ payload['config'] = {'serviceConfig': service_config}
771
+ if filters is None:
772
+ filters = entities.Filters()
773
+ if filters is not None:
774
+ payload['input']['datasetQuery'] = filters.prepare()
775
+ success, response = self._client_api.gen_request(req_type="post",
776
+ path=f"/ml/models/{model_id}/evaluate",
777
+ json_req=payload)
778
+ if not success:
779
+ raise exceptions.PlatformException(response)
780
+ return entities.Execution.from_json(_json=response.json(),
781
+ client_api=self._client_api,
782
+ project=self._project)
783
+
784
+ def predict(self, model, item_ids=None, dataset_id=None, filters=None):
785
+ """
786
+ Run model prediction with items
787
+
788
+ :param model: dl.Model entity to run the prediction.
789
+ :param item_ids: a list of item ids to run the prediction on.
+ :param dataset_id: a dataset id to run the prediction on.
+ :param filters: dl.Filters entity to select the dataset items to run the prediction on (requires dataset_id).
792
+ :return:
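+
+ **Example**:
+
+ A minimal usage sketch; the model is assumed to be deployed already, and ``model_entity`` / ``item`` are placeholder entities.
+
+ .. code-block:: python
+
+ execution = project.models.predict(model=model_entity, item_ids=[item.id])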
793
+ """
794
+ if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
795
+ # no services for model
796
+ raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
797
+ if item_ids is None and dataset_id is None:
798
+ raise ValueError("Need to provide either item_ids or dataset_id")
799
+ if filters is not None and dataset_id is None:
800
+ raise ValueError("If filters are provided, dataset_id is mandatory.")
801
+ payload_input = {}
802
+ if item_ids is not None:
803
+ payload_input['itemIds'] = item_ids
804
+ if dataset_id is not None:
805
+ payload_input['datasetId'] = dataset_id
806
+ if filters is not None:
807
+ payload_input['datasetQuery'] = filters.prepare()['filter']
808
+ payload = {'input': payload_input,
809
+ 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
810
+ logger.debug(f"generate post request to predict with payload {payload}")
811
+ success, response = self._client_api.gen_request(req_type="post",
812
+ path=f"/ml/models/{model.id}/predict",
813
+ json_req=payload)
814
+ if not success:
815
+ logger.error(f"failed to make API request /ml/models/{model.id}/predict with payload {payload} response {response}")
816
+ raise exceptions.PlatformException(response)
817
+ return entities.Execution.from_json(_json=response.json(),
818
+ client_api=self._client_api,
819
+ project=self._project)
820
+
821
+ def embed(self, model, item_ids=None, dataset_id=None, filters=None):
822
+ """
823
+ Run model embedding on items
824
+
825
+ :param model: dl.Model entity to run the embedding.
+ :param item_ids: a list of item ids to run the embedding on.
+ :param dataset_id: a dataset id to run the embedding on.
+ :param filters: dl.Filters entity to select the dataset items to run the embedding on (requires dataset_id).
829
+ :return: Execution
830
+ :rtype: dtlpy.entities.execution.Execution
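+
+ **Example**:
+
+ A minimal usage sketch; the embedding model is assumed to be deployed already, and ``model_entity`` / ``item`` are placeholder entities.
+
+ .. code-block:: python
+
+ execution = project.models.embed(model=model_entity, item_ids=[item.id])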
831
+ """
832
+ if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
833
+ # no services for model
834
+ raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
835
+ if item_ids is None and dataset_id is None:
836
+ raise ValueError("Need to provide either item_ids or dataset_id")
837
+ if filters is not None and dataset_id is None:
838
+ raise ValueError("If filters are provided, dataset_id is mandatory.")
839
+ payload_input = {}
840
+ if item_ids is not None:
841
+ payload_input['itemIds'] = item_ids
842
+ if dataset_id is not None:
843
+ payload_input['datasetId'] = dataset_id
844
+ if filters is not None:
845
+ payload_input['datasetQuery'] = filters.prepare()['filter']
846
+ payload = {'input': payload_input,
847
+ 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
848
+ logger.debug(f"generate post request to embed with payload {payload}")
849
+ success, response = self._client_api.gen_request(req_type="post",
850
+ path=f"/ml/models/{model.id}/embed",
851
+ json_req=payload)
852
+ if not success:
853
+ logger.error(f"failed to make API request /ml/models/{model.id}/embed with payload {payload} response {response}")
854
+ raise exceptions.PlatformException(response)
855
+ return entities.Execution.from_json(_json=response.json(),
856
+ client_api=self._client_api,
857
+ project=self._project)
858
+
859
+ def embed_datasets(self, model, dataset_ids, attach_trigger=False):
860
+ """
861
+ Run model embed with datasets
862
+
863
+ :param model: dl.Model entity to run the embedding.
+ :param dataset_ids: a list of dataset ids to run the embedding on.
+ :param attach_trigger: bool, if True the trigger will be activated
866
+ :return:
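+
+ **Example**:
+
+ A minimal usage sketch; ``model_entity`` and ``dataset_entity`` are placeholder entities, and the method already waits on the returned dl.Command.
+
+ .. code-block:: python
+
+ command = project.models.embed_datasets(model=model_entity, dataset_ids=[dataset_entity.id], attach_trigger=True)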
867
+ """
868
+ if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
869
+ # no services for model
870
+ raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
871
+ if dataset_ids is None:
872
+ raise ValueError("Need to provide either dataset_id")
873
+ payload = {'datasetIds': dataset_ids,
874
+ 'config': {'serviceId': model.metadata['system']['deploy']['services'][0]},
875
+ 'attachTrigger': attach_trigger
876
+ }
877
+
878
+ success, response = self._client_api.gen_request(req_type="post",
879
+ path=f"/ml/models/{model.id}/embed/datasets",
880
+ json_req=payload)
881
+ if not success:
882
+ raise exceptions.PlatformException(response)
883
+ command = entities.Command.from_json(_json=response.json(),
884
+ client_api=self._client_api)
885
+ command = command.wait()
886
+ return command
887
+
888
+ def deploy(self, model_id: str, service_config=None) -> entities.Service:
889
+ """
890
+ Deploy a trained model. This will create a service that will execute predictions
891
+
892
+ :param model_id: id of the model to deploy
893
+ :param dict service_config: Service object as dict. Contains the spec of the default service to create.
894
+ :return: dl.Service: the deployed service
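+
+ **Example**:
+
+ A minimal usage sketch; ``model_entity`` is a placeholder dl.Model and service_config is omitted.
+
+ .. code-block:: python
+
+ service = project.models.deploy(model_id=model_entity.id)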
895
+ """
896
+ payload = dict()
897
+ if service_config is not None:
898
+ payload['serviceConfig'] = service_config if not service_config.get("serviceConfig") else service_config.get("serviceConfig")
899
+ success, response = self._client_api.gen_request(req_type="post",
900
+ path=f"/ml/models/{model_id}/deploy",
901
+ json_req=payload)
902
+ if not success:
903
+ raise exceptions.PlatformException(response)
904
+
905
+ return entities.Service.from_json(_json=response.json(),
906
+ client_api=self._client_api,
907
+ project=self._project,
908
+ package=self._package)
909
+
910
+
911
+ class Metrics:
912
+ def __init__(self, client_api, model=None, model_id=None):
913
+ self._client_api = client_api
914
+ self._model_id = model_id
915
+ self._model = model
916
+
917
+ @property
918
+ def model(self):
919
+ return self._model
920
+
921
+ def create(self, samples, dataset_id) -> bool:
922
+ """
923
+ Add Samples for model analytics and metrics
924
+
925
+ :param samples: list of dl.PlotSample - must contain: model_id, figure, legend, x, y
926
+ :param model_id: model id to save samples on
927
+ :param dataset_id:
928
+ :return: bool: True if success
929
+ """
930
+ if not isinstance(samples, list):
931
+ samples = [samples]
932
+
933
+ payload = list()
934
+ for sample in samples:
935
+ _json = sample.to_json()
936
+ _json['modelId'] = self.model.id
937
+ _json['datasetId'] = dataset_id
938
+ payload.append(_json)
939
+ # request
940
+ success, response = self._client_api.gen_request(req_type='post',
941
+ path='/ml/metrics/publish',
942
+ json_req=payload)
943
+
944
+ # exception handling
945
+ if not success:
946
+ raise exceptions.PlatformException(response)
947
+
948
+ # return entity
949
+ return True
950
+
951
+ def _list(self, filters: entities.Filters):
952
+ # request
953
+ success, response = self._client_api.gen_request(req_type='POST',
954
+ path='/ml/metrics/query',
955
+ json_req=filters.prepare())
956
+ if not success:
957
+ raise exceptions.PlatformException(response)
958
+ return response.json()
959
+
960
+ def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.PlotSample]:
961
+ jobs = [None for _ in range(len(response_items))]
962
+ pool = self._client_api.thread_pools(pool_name='entity.create')
963
+
964
+ # build PlotSample entities in parallel
965
+ for i_service, sample in enumerate(response_items):
966
+ jobs[i_service] = pool.submit(entities.PlotSample,
967
+ **{'x': sample.get('data', dict()).get('x', None),
968
+ 'y': sample.get('data', dict()).get('y', None),
969
+ 'legend': sample.get('legend', ''),
970
+ 'figure': sample.get('figure', '')})
971
+
972
+ # get all results
973
+ results = [j.result() for j in jobs]
974
+ # return good jobs
975
+ return miscellaneous.List(results)
976
+
977
+ def list(self, filters=None) -> entities.PagedEntities:
978
+ """
979
+ List Samples for model analytics and metrics
980
+
981
+ :param filters: dl.Filters query entity
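+
+ **Example**:
+
+ A minimal usage sketch; it assumes this repository is reached through an existing model (e.g. ``model_entity.metrics``) and that the returned pages are iterated with ``all()``.
+
+ .. code-block:: python
+
+ for sample in model_entity.metrics.list().all():
+     print(sample.figure, sample.x, sample.y)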
982
+ """
983
+ if filters is None:
984
+ filters = entities.Filters(resource=entities.FiltersResource.METRICS)
985
+ if not isinstance(filters, entities.Filters):
986
+ raise exceptions.PlatformException(error='400',
987
+ message='Unknown filters type: {!r}'.format(type(filters)))
988
+ if filters.resource != entities.FiltersResource.METRICS:
989
+ raise exceptions.PlatformException(
990
+ error='400',
991
+ message='Filters resource must be FiltersResource.METRICS. Got: {!r}'.format(filters.resource))
992
+ if self._model is not None:
993
+ filters.add(field='modelId', values=self._model.id)
994
+ paged = entities.PagedEntities(items_repository=self,
995
+ filters=filters,
996
+ page_offset=filters.page,
997
+ page_size=filters.page_size,
998
+ client_api=self._client_api)
999
+ paged.get_page()
1000
+ return paged