dtlpy 1.114.17__py3-none-any.whl → 1.116.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -311
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -296
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -442
  76. dtlpy/entities/dataset.py +1299 -1285
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -223
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -645
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -953
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -499
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -958
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1086
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -158
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -435
  166. dtlpy/repositories/datasets.py +1504 -1291
  167. dtlpy/repositories/downloader.py +976 -903
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -470
  170. dtlpy/repositories/executions.py +815 -817
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -238
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -909
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -988
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -651
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1782
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -183
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.114.17.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.114.17.dist-info/RECORD +0 -240
  237. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/repositories/computes.py
@@ -1,435 +1,439 @@
 import base64
 import datetime
 import json

 from dtlpy import miscellaneous

 from ..services.api_client import ApiClient
 from .. import exceptions, entities, repositories
 from typing import List, Optional, Dict
 from ..entities import ComputeCluster, ComputeContext, ComputeType
 from ..entities.integration import IntegrationType
 import logging
 from urllib.parse import urlparse, urlencode, parse_qs, urlunparse

 logger = logging.getLogger(name='dtlpy')


 class Computes:

     def __init__(self, client_api: ApiClient):
         self._client_api = client_api
         self._base_url = '/compute'
         self._commands = None
         self._projects = None
         self._organizations = None
         self.log_cache = dict()

     @property
     def commands(self) -> repositories.Commands:
         if self._commands is None:
             self._commands = repositories.Commands(client_api=self._client_api)
         return self._commands

     @property
     def projects(self):
         if self._projects is None:
             self._projects = repositories.Projects(client_api=self._client_api)
         return self._projects

     @property
     def organizations(self):
         if self._organizations is None:
             self._organizations = repositories.Organizations(client_api=self._client_api)
         return self._organizations

     def create(
             self,
             name: str,
             context: entities.ComputeContext,
             shared_contexts: Optional[List[entities.ComputeContext]],
             cluster: entities.ComputeCluster,
             type: entities.ComputeType = entities.ComputeType.KUBERNETES,
             is_global: Optional[bool] = False,
             features: Optional[Dict] = None,
             wait=True,
             status: entities.ComputeStatus = None,
             settings: entities.ComputeSettings = None,
-            metadata: dict = None
+            metadata: dict = None,
+            deployment_configuration: dict = None
     ):
         """
         Create a new compute

         :param name: Compute name
         :param context: Compute context
         :param shared_contexts: Shared contexts
         :param cluster: Compute cluster
         :param type: Compute type
         :param is_global: Is global
         :param features: Features
         :param wait: Wait for compute creation
         :param status: Compute status
         :param settings: Compute settings
         :param metadata: Compute metadata
+        :param deployment_configuration: Compute deployment Configuration
         :return: Compute
         :rtype: dl.entities.compute.Compute
         """
         if metadata is None:
             metadata = {}
         shared_contexts_json = []
         for shared_context in shared_contexts:
-            src_json = shared_context.to_json() if isinstance(shared_context, entities.ComputeContext) else shared_context
+            src_json = shared_context.to_json() if isinstance(shared_context,
+                                                              entities.ComputeContext) else shared_context
             shared_contexts_json.append(src_json)
         payload = {
             'name': name,
             'context': context.to_json(),
             'type': type.value,
             'global': is_global,
             'features': features,
             'sharedContexts': shared_contexts_json,
             'cluster': cluster.to_json(),
             'status': status,
             "settings": settings.to_json() if isinstance(settings, entities.ComputeSettings) else settings,
-            "metadata": metadata
+            "metadata": metadata,
+            "deploymentConfiguration": deployment_configuration
         }

         # request
         success, response = self._client_api.gen_request(
             req_type='post',
             path=self._base_url,
             json_req=payload
         )

         if not success:
             raise exceptions.PlatformException(response)

         compute = self._build_compute_by_type(response.json())

         if wait:
             command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 try:
                     callback = self.__get_log_compute_progress_callback(compute.id)
                     command.wait(iteration_callback=callback)
                     callback()
                 except Exception as e:
                     self.log_cache.pop(compute.id, None)
                     raise e
             compute = self.get(compute_id=compute.id)

         return compute

     def _build_compute_by_type(self, _json):
         if _json.get('type') == 'kubernetes':
             compute = entities.KubernetesCompute.from_json(
                 _json=_json,
                 client_api=self._client_api
             )
         else:
             compute = entities.Compute.from_json(
                 _json=_json,
                 client_api=self._client_api
             )
         return compute

     def __get_log_compute_progress_callback(self, compute_id: str, is_destroy=False):
         def func():
             compute = self.get(compute_id=compute_id, archived=True)
             log_type = 'bootstrap'
             validation_progress = None
             validation_logs = None
             if is_destroy is True:
                 log_type = 'destroy'
             else:
                 validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
                 validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
             bootstrap_progress = compute.metadata.get('system', {}).get(log_type, {}).get('progress', None)
             bootstrap_logs = compute.metadata.get('system', {}).get(log_type, {}).get('logs', None)

             if bootstrap_progress is not None:
                 if 'bootstrap' not in self.log_cache.get(compute_id, {}):
                     logger.info(f"{log_type} in progress:")
                 last_index = len(self.log_cache.get(compute_id, {}).get(log_type, []))
                 new_logs = bootstrap_logs[last_index:]
                 if new_logs:
                     for log in new_logs:
                         logger.info(log)
                     logger.info(f'{log_type} progress: {int(bootstrap_progress)}%')
                 if compute_id not in self.log_cache:
                     self.log_cache[compute_id] = {}
                 self.log_cache[compute_id][log_type] = bootstrap_logs
             if bootstrap_progress in [100, None] and validation_progress is not None:
                 if 'validation' not in self.log_cache.get(compute_id, {}):
                     logger.info(f"Validating created compute:")
                 last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
                 new_logs = validation_logs[last_index:]
                 if new_logs:
                     for log in new_logs:
                         logger.info(log)
                     logger.info(f'Validation progress: {int(validation_progress)}%')
                 if compute_id not in self.log_cache:
                     self.log_cache[compute_id] = {}
                 self.log_cache[compute_id]['validation'] = validation_logs
+
         return func

-    def get(self, compute_id: str, archived = False):
+    def get(self, compute_id: str, archived=False):
         """
         Get a compute

         :param compute_id: Compute ID
         :param archived: Archived
         :return: Compute
         :rtype: dl.entities.compute.Compute
         """
         url_path = self._base_url + '/{}'.format(compute_id)
-        params_to_add = {"archived": "true" if archived else "false" }
+        params_to_add = {"archived": "true" if archived else "false"}
         parsed_url = urlparse(url_path)
         query_dict = parse_qs(parsed_url.query)
         query_dict.update(params_to_add)
         new_query = urlencode(query_dict, doseq=True)
         url_path = urlunparse(parsed_url._replace(query=new_query))
         # request
         success, response = self._client_api.gen_request(
             req_type='get',
             path=url_path
         )

         if not success:
             raise exceptions.PlatformException(response)

         compute = self._build_compute_by_type(response.json())

         return compute

     def update(self, compute: entities.Compute):
         """
         Update a compute

         :param compute: Compute
         :return: Compute
         :rtype: dl.entities.compute.Compute
         """

         # request
         success, response = self._client_api.gen_request(
             req_type='patch',
             path=self._base_url + '/{}'.format(compute.id),
             json_req=compute.to_json()
         )

         if not success:
             raise exceptions.PlatformException(response)

         compute = self._build_compute_by_type(response.json())

         return compute

     def delete(self, compute_id: str, skip_destroy: bool = False, wait: bool = True):
         """
         Delete a compute

         :param compute_id: compute ID
         :param skip_destroy: bool
         :param bool wait: Wait for deletion
         """
         url_path = self._base_url + '/{}'.format(compute_id)
-        params_to_add = {"skipDestroy": "true" if skip_destroy else "false" }
+        params_to_add = {"skipDestroy": "true" if skip_destroy else "false"}
         parsed_url = urlparse(url_path)
         query_dict = parse_qs(parsed_url.query)
         query_dict.update(params_to_add)
         new_query = urlencode(query_dict, doseq=True)
         url_path = urlunparse(parsed_url._replace(query=new_query))
         # request
         success, response = self._client_api.gen_request(
             req_type='delete',
             path=url_path
         )

         if not success:
             raise exceptions.PlatformException(response)
         if skip_destroy is not True and wait is True:
             command_response = response.json()
             command_id = command_response['id']
             command = self.commands.get(command_id, url='api/v1/commands/faas/{}'.format(command_id))
             try:
                 callback = self.__get_log_compute_progress_callback(compute_id, is_destroy=True)
                 command.wait(iteration_callback=callback)
                 callback()
             except Exception as e:
                 self.log_cache.pop(command_id, None)
                 raise e

         return True

     def validate(self, compute_id: str, wait: bool = True):
         """
         Validate a compute

         :param str compute_id: Compute ID
         :param bool wait: Wait for validation
         :return: Compute
         :rtype: dl.entities.compute.Compute
         """

         # request
         success, response = self._client_api.gen_request(
             req_type='post',
             path=self._base_url + '/{}/validate'.format(compute_id)
         )

         if not success:
             raise exceptions.PlatformException(response)

         compute = self._build_compute_by_type(response.json())

         if wait:
             command_id = compute.metadata.get('system', {}).get('commands', {}).get('validate', None)
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 try:
                     callback = self.__get_log_compute_progress_callback(compute.id)
                     command.wait(iteration_callback=callback)
                     callback()
                 except Exception as e:
                     self.log_cache.pop(compute.id, None)
                     raise e
             compute = self.get(compute_id=compute.id)

         return compute

     def list_global(self):
         """
         List computes

         :return: List of computes
         :rtype: list[str]
         """

         # request
         success, response = self._client_api.gen_request(
             req_type='get',
             path=self._base_url + '/globals',
         )

         if not success:
             raise exceptions.PlatformException(response)

-
         return response.json()

     @staticmethod
     def read_file(file_path):
         try:
             with open(file_path, 'r') as file:
                 content = file.read()
                 return content
         except FileNotFoundError:
             print(f"The file at {file_path} was not found.")
         except IOError:
             print(f"An error occurred while reading the file at {file_path}.")

     def decode_and_parse_input(self, file_path):
         """Decode a base64 encoded string from file a and parse it as JSON."""
         decoded_bytes = base64.b64decode(self.read_file(file_path))
         return json.loads(decoded_bytes)

     @staticmethod
     def create_integration(org, name, auth_data):
         """Create a new key-value integration within the specified project."""
         return org.integrations.create(
             integrations_type=IntegrationType.KEY_VALUE,
             name=name,
             options={
                 'key': name,
                 'value': json.dumps(auth_data)
             }
         )

-    def setup_compute_cluster(self, config, integration, org_id, project=None):
+    def setup_compute_cluster(self, config, integration, org_id, project=None, is_global=False):
         """Set up a compute cluster using the provided configuration and integration."""
         cluster = ComputeCluster.from_setup_json(config, integration)
         project_id = None
         if project is not None:
             project_id = project.id
         compute = self.create(
             config['config']['name'],
             ComputeContext([], org_id, project_id),
             [],
             cluster,
             ComputeType.KUBERNETES,
             status=config['config'].get('status', None),
             settings=config['config'].get('settings', None),
-            metadata=config['config'].get('metadata', None))
+            deployment_configuration=config['config'].get('deploymentConfiguration', {}),
+            metadata=config['config'].get('metadata', None), is_global=is_global)

         return compute

-    def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
+    def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None, is_global=False):
         config = self.decode_and_parse_input(config_file_path)
         project = None
         if project_name is not None:
             project = self.projects.get(project_name=project_name)
         org = self.organizations.get(organization_id=org_id)
         integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
                             .replace(':', '_'))
         integration = self.create_integration(org, integration_name, config['authentication'])
-        compute = self.setup_compute_cluster(config, integration, org_id, project)
+        compute = self.setup_compute_cluster(config, integration, org_id, project, is_global=is_global)
         return compute

-
     def _list(self, filters: entities.Filters):
         url = self._base_url + '/query'
         success, response = self._client_api.gen_request(req_type='POST',
                                                          path=url,
                                                          json_req=filters.prepare())
         if not success:
             raise exceptions.PlatformException(response)

         return response.json()

     def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Compute]:
         pool = self._client_api.thread_pools(pool_name='entity.create')
         jobs = [None for _ in range(len(response_items))]
         for i_item, item in enumerate(response_items):
             jobs[i_item] = pool.submit(entities.Compute._protected_from_json,
                                        **{'client_api': self._client_api,
                                           '_json': item})
         results = [j.result() for j in jobs]
         _ = [logger.warning(r[1]) for r in results if r[0] is False]
         items = miscellaneous.List([r[1] for r in results if r[0] is True])
         return items

     def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
         """
         List all services drivers

         :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
         :return: Paged entity
         :rtype: dtlpy.entities.paged_entities.PagedEntities

         **Example**:

         .. code-block:: python

             services = dl.service_drivers.list()
         """
         # default filters
         if filters is None:
             filters = entities.Filters(resource=entities.FiltersResource.COMPUTE)

         if filters.resource != entities.FiltersResource.COMPUTE:
             raise exceptions.PlatformException(
                 error='400',
                 message='Filters resource must to be FiltersResource.COMPUTE. Got: {!r}'.format(
                     filters.resource))

         if not isinstance(filters, entities.Filters):
             raise exceptions.PlatformException('400', 'Unknown filters type')

         paged = entities.PagedEntities(items_repository=self,
                                        filters=filters,
                                        page_offset=filters.page,
                                        page_size=filters.page_size,
                                        client_api=self._client_api)
         paged.get_page()
         return paged
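For orientation, here is a minimal usage sketch of the API change shown in the dtlpy/repositories/computes.py hunk above: Computes.create gains a deployment_configuration argument (sent to the backend as 'deploymentConfiguration'), and setup_compute_cluster / create_from_config_file gain an is_global flag that is forwarded to create. The snippet assumes the repository is exposed as dl.computes and uses placeholder values; it illustrates the new signatures and is not an official example.

import dtlpy as dl

# Placeholder values -- replace with a real base64-encoded cluster config file,
# organization ID and project name.
compute = dl.computes.create_from_config_file(
    config_file_path='/path/to/cluster_config.b64',  # base64-encoded JSON, decoded by decode_and_parse_input
    org_id='<organization-id>',
    project_name='<project-name>',
    is_global=False  # new optional flag in this release; forwarded to setup_compute_cluster and create
)

# Computes.create can also receive the deployment configuration directly; it is sent
# in the request payload under 'deploymentConfiguration'.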