dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
@@ -1,407 +1,435 @@
1
- import base64
2
- import datetime
3
- import json
4
-
5
- from dtlpy import miscellaneous
6
-
7
- from ..services.api_client import ApiClient
8
- from .. import exceptions, entities, repositories
9
- from typing import List, Optional, Dict
10
- from ..entities import ComputeCluster, ComputeContext, ComputeType
11
- from ..entities.integration import IntegrationType
12
- import logging
13
- from urllib.parse import urlparse, urlencode, parse_qs, urlunparse
14
-
15
- logger = logging.getLogger(name='dtlpy')
16
-
17
-
18
- class Computes:
19
-
20
- def __init__(self, client_api: ApiClient):
21
- self._client_api = client_api
22
- self._base_url = '/compute'
23
- self._commands = None
24
- self._projects = None
25
- self._organizations = None
26
- self.log_cache = dict()
27
-
28
- @property
29
- def commands(self) -> repositories.Commands:
30
- if self._commands is None:
31
- self._commands = repositories.Commands(client_api=self._client_api)
32
- return self._commands
33
-
34
- @property
35
- def projects(self):
36
- if self._projects is None:
37
- self._projects = repositories.Projects(client_api=self._client_api)
38
- return self._projects
39
-
40
- @property
41
- def organizations(self):
42
- if self._organizations is None:
43
- self._organizations = repositories.Organizations(client_api=self._client_api)
44
- return self._organizations
45
-
46
- def create(
47
- self,
48
- name: str,
49
- context: entities.ComputeContext,
50
- shared_contexts: Optional[List[entities.ComputeContext]],
51
- cluster: entities.ComputeCluster,
52
- type: entities.ComputeType = entities.ComputeType.KUBERNETES,
53
- is_global: Optional[bool] = False,
54
- features: Optional[Dict] = None,
55
- wait=True,
56
- status: entities.ComputeStatus = None,
57
- settings: entities.ComputeSettings = None,
58
- metadata: dict = None
59
- ):
60
- """
61
- Create a new compute
62
-
63
- :param name: Compute name
64
- :param context: Compute context
65
- :param shared_contexts: Shared contexts
66
- :param cluster: Compute cluster
67
- :param type: Compute type
68
- :param is_global: Is global
69
- :param features: Features
70
- :param wait: Wait for compute creation
71
- :param status: Compute status
72
- :param settings: Compute settings
73
- :param metadata: Compute metadata
74
- :return: Compute
75
- :rtype: dl.entities.compute.Compute
76
- """
77
- if metadata is None:
78
- metadata = {}
79
- shared_contexts_json = []
80
- for shared_context in shared_contexts:
81
- src_json = shared_context.to_json() if isinstance(shared_context, entities.ComputeContext) else shared_context
82
- shared_contexts_json.append(src_json)
83
- payload = {
84
- 'name': name,
85
- 'context': context.to_json(),
86
- 'type': type.value,
87
- 'global': is_global,
88
- 'features': features,
89
- 'sharedContexts': shared_contexts_json,
90
- 'cluster': cluster.to_json(),
91
- 'status': status,
92
- "settings": settings.to_json() if isinstance(settings, entities.ComputeSettings) else settings,
93
- "metadata": metadata
94
- }
95
-
96
- # request
97
- success, response = self._client_api.gen_request(
98
- req_type='post',
99
- path=self._base_url,
100
- json_req=payload
101
- )
102
-
103
- if not success:
104
- raise exceptions.PlatformException(response)
105
-
106
- compute = self._build_compute_by_type(response.json())
107
-
108
- if wait:
109
- command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
110
- if command_id is not None:
111
- command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
112
- try:
113
- command.wait(iteration_callback=self.__get_log_compute_progress_callback(compute.id))
114
- except Exception as e:
115
- self.log_cache.pop(compute.id, None)
116
- raise e
117
- compute = self.get(compute_id=compute.id)
118
-
119
- return compute
120
-
121
- def _build_compute_by_type(self, _json):
122
- if _json.get('type') == 'kubernetes':
123
- compute = entities.KubernetesCompute.from_json(
124
- _json=_json,
125
- client_api=self._client_api
126
- )
127
- else:
128
- compute = entities.Compute.from_json(
129
- _json=_json,
130
- client_api=self._client_api
131
- )
132
- return compute
133
-
134
- def __get_log_compute_progress_callback(self, compute_id: str):
135
- def func():
136
- compute = self.get(compute_id=compute_id)
137
- bootstrap_progress = compute.metadata.get('system', {}).get('bootstrap', {}).get('progress', None)
138
- bootstrap_logs = compute.metadata.get('system', {}).get('bootstrap', {}).get('logs', None)
139
- validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
140
- validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
141
- if bootstrap_progress is not None:
142
- if 'bootstrap' not in self.log_cache.get(compute_id, {}):
143
- logger.info(f"Bootstrap in progress:")
144
- last_index = len(self.log_cache.get(compute_id, {}).get('bootstrap', []))
145
- new_logs = bootstrap_logs[last_index:]
146
- if new_logs:
147
- for log in new_logs:
148
- logger.info(log)
149
- logger.info(f'Bootstrap progress: {int(bootstrap_progress)}%')
150
- if compute_id not in self.log_cache:
151
- self.log_cache[compute_id] = {}
152
- self.log_cache[compute_id]['bootstrap'] = bootstrap_logs
153
- if bootstrap_progress in [100, None] and validation_progress is not None:
154
- if 'validation' not in self.log_cache.get(compute_id, {}):
155
- logger.info(f"Validating created compute:")
156
- last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
157
- new_logs = validation_logs[last_index:]
158
- if new_logs:
159
- for log in new_logs:
160
- logger.info(log)
161
- logger.info(f'Validation progress: {int(validation_progress)}%')
162
- if compute_id not in self.log_cache:
163
- self.log_cache[compute_id] = {}
164
- self.log_cache[compute_id]['validation'] = validation_logs
165
- return func
166
-
167
-
168
- def get(self, compute_id: str):
169
- """
170
- Get a compute
171
-
172
- :param compute_id: Compute ID
173
- :return: Compute
174
- :rtype: dl.entities.compute.Compute
175
- """
176
-
177
- # request
178
- success, response = self._client_api.gen_request(
179
- req_type='get',
180
- path=self._base_url + '/{}'.format(compute_id)
181
- )
182
-
183
- if not success:
184
- raise exceptions.PlatformException(response)
185
-
186
- compute = self._build_compute_by_type(response.json())
187
-
188
- return compute
189
-
190
- def update(self, compute: entities.Compute):
191
- """
192
- Update a compute
193
-
194
- :param compute: Compute
195
- :return: Compute
196
- :rtype: dl.entities.compute.Compute
197
- """
198
-
199
- # request
200
- success, response = self._client_api.gen_request(
201
- req_type='patch',
202
- path=self._base_url + '/{}'.format(compute.id),
203
- json_req=compute.to_json()
204
- )
205
-
206
- if not success:
207
- raise exceptions.PlatformException(response)
208
-
209
- compute = self._build_compute_by_type(response.json())
210
-
211
- return compute
212
-
213
- def delete(self, compute_id: str, skip_destroy: bool = False
214
- ):
215
- """
216
- Delete a compute
217
-
218
- :param compute_id: compute ID
219
- :param skip_destroy: bool
220
- """
221
- url_path = self._base_url + '/{}'.format(compute_id)
222
- params_to_add = {"skipDestroy": "true" if skip_destroy else "false" }
223
- parsed_url = urlparse(url_path)
224
- query_dict = parse_qs(parsed_url.query)
225
- query_dict.update(params_to_add)
226
- new_query = urlencode(query_dict, doseq=True)
227
- url_path = urlunparse(parsed_url._replace(query=new_query))
228
- # request
229
- success, response = self._client_api.gen_request(
230
- req_type='delete',
231
- path=url_path
232
- )
233
-
234
- if not success:
235
- raise exceptions.PlatformException(response)
236
-
237
- return True
238
-
239
- def validate(self, compute_id: str, wait: bool = True):
240
- """
241
- Validate a compute
242
-
243
- :param str compute_id: Compute ID
244
- :param bool wait: Wait for validation
245
- :return: Compute
246
- :rtype: dl.entities.compute.Compute
247
- """
248
-
249
- # request
250
- success, response = self._client_api.gen_request(
251
- req_type='post',
252
- path=self._base_url + '/{}/validate'.format(compute_id)
253
- )
254
-
255
- if not success:
256
- raise exceptions.PlatformException(response)
257
-
258
- compute = self._build_compute_by_type(response.json())
259
-
260
- if wait:
261
- command_id = compute.metadata.get('system', {}).get('commands', {}).get('validate', None)
262
- if command_id is not None:
263
- command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
264
- try:
265
- command.wait(iteration_callback=self.__get_log_compute_progress_callback(compute.id))
266
- except Exception as e:
267
- self.log_cache.pop(compute.id, None)
268
- raise e
269
- compute = self.get(compute_id=compute.id)
270
-
271
- return compute
272
-
273
- def list_global(self):
274
- """
275
- List computes
276
-
277
- :return: List of computes
278
- :rtype: list[str]
279
- """
280
-
281
- # request
282
- success, response = self._client_api.gen_request(
283
- req_type='get',
284
- path=self._base_url + '/globals',
285
- )
286
-
287
- if not success:
288
- raise exceptions.PlatformException(response)
289
-
290
-
291
- return response.json()
292
-
293
- @staticmethod
294
- def read_file(file_path):
295
- try:
296
- with open(file_path, 'r') as file:
297
- content = file.read()
298
- return content
299
- except FileNotFoundError:
300
- print(f"The file at {file_path} was not found.")
301
- except IOError:
302
- print(f"An error occurred while reading the file at {file_path}.")
303
-
304
- def decode_and_parse_input(self, file_path):
305
- """Decode a base64 encoded string from file a and parse it as JSON."""
306
- decoded_bytes = base64.b64decode(self.read_file(file_path))
307
- return json.loads(decoded_bytes)
308
-
309
- @staticmethod
310
- def create_integration(org, name, auth_data):
311
- """Create a new key-value integration within the specified project."""
312
- return org.integrations.create(
313
- integrations_type=IntegrationType.KEY_VALUE,
314
- name=name,
315
- options={
316
- 'key': name,
317
- 'value': json.dumps(auth_data)
318
- }
319
- )
320
-
321
- def setup_compute_cluster(self, config, integration, org_id, project=None):
322
- """Set up a compute cluster using the provided configuration and integration."""
323
- cluster = ComputeCluster.from_setup_json(config, integration)
324
- project_id = None
325
- if project is not None:
326
- project_id = project.id
327
- compute = self.create(
328
- config['config']['name'],
329
- ComputeContext([], org_id, project_id),
330
- [],
331
- cluster,
332
- ComputeType.KUBERNETES,
333
- status=config['config'].get('status', None),
334
- settings=config['config'].get('settings', None),
335
- metadata=config['config'].get('metadata', None))
336
-
337
- return compute
338
-
339
- def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
340
- config = self.decode_and_parse_input(config_file_path)
341
- project = None
342
- if project_name is not None:
343
- project = self.projects.get(project_name=project_name)
344
- org = self.organizations.get(organization_id=org_id)
345
- integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
346
- .replace(':', '_'))
347
- integration = self.create_integration(org, integration_name, config['authentication'])
348
- compute = self.setup_compute_cluster(config, integration, org_id, project)
349
- return compute
350
-
351
-
352
- def _list(self, filters: entities.Filters):
353
- url = self._base_url + '/query'
354
- success, response = self._client_api.gen_request(req_type='POST',
355
- path=url,
356
- json_req=filters.prepare())
357
- if not success:
358
- raise exceptions.PlatformException(response)
359
-
360
- return response.json()
361
-
362
- def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Compute]:
363
- pool = self._client_api.thread_pools(pool_name='entity.create')
364
- jobs = [None for _ in range(len(response_items))]
365
- for i_item, item in enumerate(response_items):
366
- jobs[i_item] = pool.submit(entities.Compute._protected_from_json,
367
- **{'client_api': self._client_api,
368
- '_json': item})
369
- results = [j.result() for j in jobs]
370
- _ = [logger.warning(r[1]) for r in results if r[0] is False]
371
- items = miscellaneous.List([r[1] for r in results if r[0] is True])
372
- return items
373
-
374
- def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
375
- """
376
- List all services drivers
377
-
378
- :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
379
- :return: Paged entity
380
- :rtype: dtlpy.entities.paged_entities.PagedEntities
381
-
382
- **Example**:
383
-
384
- .. code-block:: python
385
-
386
- services = dl.service_drivers.list()
387
- """
388
- # default filters
389
- if filters is None:
390
- filters = entities.Filters(resource=entities.FiltersResource.COMPUTE)
391
-
392
- if filters.resource != entities.FiltersResource.COMPUTE:
393
- raise exceptions.PlatformException(
394
- error='400',
395
- message='Filters resource must to be FiltersResource.COMPUTE. Got: {!r}'.format(
396
- filters.resource))
397
-
398
- if not isinstance(filters, entities.Filters):
399
- raise exceptions.PlatformException('400', 'Unknown filters type')
400
-
401
- paged = entities.PagedEntities(items_repository=self,
402
- filters=filters,
403
- page_offset=filters.page,
404
- page_size=filters.page_size,
405
- client_api=self._client_api)
406
- paged.get_page()
1
+ import base64
2
+ import datetime
3
+ import json
4
+
5
+ from dtlpy import miscellaneous
6
+
7
+ from ..services.api_client import ApiClient
8
+ from .. import exceptions, entities, repositories
9
+ from typing import List, Optional, Dict
10
+ from ..entities import ComputeCluster, ComputeContext, ComputeType
11
+ from ..entities.integration import IntegrationType
12
+ import logging
13
+ from urllib.parse import urlparse, urlencode, parse_qs, urlunparse
14
+
15
+ logger = logging.getLogger(name='dtlpy')
16
+
17
+
18
+ class Computes:
19
+
20
+ def __init__(self, client_api: ApiClient):
21
+ self._client_api = client_api
22
+ self._base_url = '/compute'
23
+ self._commands = None
24
+ self._projects = None
25
+ self._organizations = None
26
+ self.log_cache = dict()
27
+
28
+ @property
29
+ def commands(self) -> repositories.Commands:
30
+ if self._commands is None:
31
+ self._commands = repositories.Commands(client_api=self._client_api)
32
+ return self._commands
33
+
34
+ @property
35
+ def projects(self):
36
+ if self._projects is None:
37
+ self._projects = repositories.Projects(client_api=self._client_api)
38
+ return self._projects
39
+
40
+ @property
41
+ def organizations(self):
42
+ if self._organizations is None:
43
+ self._organizations = repositories.Organizations(client_api=self._client_api)
44
+ return self._organizations
45
+
46
+ def create(
47
+ self,
48
+ name: str,
49
+ context: entities.ComputeContext,
50
+ shared_contexts: Optional[List[entities.ComputeContext]],
51
+ cluster: entities.ComputeCluster,
52
+ type: entities.ComputeType = entities.ComputeType.KUBERNETES,
53
+ is_global: Optional[bool] = False,
54
+ features: Optional[Dict] = None,
55
+ wait=True,
56
+ status: entities.ComputeStatus = None,
57
+ settings: entities.ComputeSettings = None,
58
+ metadata: dict = None
59
+ ):
60
+ """
61
+ Create a new compute
62
+
63
+ :param name: Compute name
64
+ :param context: Compute context
65
+ :param shared_contexts: Shared contexts
66
+ :param cluster: Compute cluster
67
+ :param type: Compute type
68
+ :param is_global: Is global
69
+ :param features: Features
70
+ :param wait: Wait for compute creation
71
+ :param status: Compute status
72
+ :param settings: Compute settings
73
+ :param metadata: Compute metadata
74
+ :return: Compute
75
+ :rtype: dl.entities.compute.Compute
76
+ """
77
+ if metadata is None:
78
+ metadata = {}
79
+ shared_contexts_json = []
80
+ for shared_context in shared_contexts:
81
+ src_json = shared_context.to_json() if isinstance(shared_context, entities.ComputeContext) else shared_context
82
+ shared_contexts_json.append(src_json)
83
+ payload = {
84
+ 'name': name,
85
+ 'context': context.to_json(),
86
+ 'type': type.value,
87
+ 'global': is_global,
88
+ 'features': features,
89
+ 'sharedContexts': shared_contexts_json,
90
+ 'cluster': cluster.to_json(),
91
+ 'status': status,
92
+ "settings": settings.to_json() if isinstance(settings, entities.ComputeSettings) else settings,
93
+ "metadata": metadata
94
+ }
95
+
96
+ # request
97
+ success, response = self._client_api.gen_request(
98
+ req_type='post',
99
+ path=self._base_url,
100
+ json_req=payload
101
+ )
102
+
103
+ if not success:
104
+ raise exceptions.PlatformException(response)
105
+
106
+ compute = self._build_compute_by_type(response.json())
107
+
108
+ if wait:
109
+ command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
110
+ if command_id is not None:
111
+ command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
112
+ try:
113
+ callback = self.__get_log_compute_progress_callback(compute.id)
114
+ command.wait(iteration_callback=callback)
115
+ callback()
116
+ except Exception as e:
117
+ self.log_cache.pop(compute.id, None)
118
+ raise e
119
+ compute = self.get(compute_id=compute.id)
120
+
121
+ return compute
122
+
123
+ def _build_compute_by_type(self, _json):
124
+ if _json.get('type') == 'kubernetes':
125
+ compute = entities.KubernetesCompute.from_json(
126
+ _json=_json,
127
+ client_api=self._client_api
128
+ )
129
+ else:
130
+ compute = entities.Compute.from_json(
131
+ _json=_json,
132
+ client_api=self._client_api
133
+ )
134
+ return compute
135
+
136
+ def __get_log_compute_progress_callback(self, compute_id: str, is_destroy=False):
137
+ def func():
138
+ compute = self.get(compute_id=compute_id, archived=True)
139
+ log_type = 'bootstrap'
140
+ validation_progress = None
141
+ validation_logs = None
142
+ if is_destroy is True:
143
+ log_type = 'destroy'
144
+ else:
145
+ validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
146
+ validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
147
+ bootstrap_progress = compute.metadata.get('system', {}).get(log_type, {}).get('progress', None)
148
+ bootstrap_logs = compute.metadata.get('system', {}).get(log_type, {}).get('logs', None)
149
+
150
+ if bootstrap_progress is not None:
151
+ if 'bootstrap' not in self.log_cache.get(compute_id, {}):
152
+ logger.info(f"{log_type} in progress:")
153
+ last_index = len(self.log_cache.get(compute_id, {}).get(log_type, []))
154
+ new_logs = bootstrap_logs[last_index:]
155
+ if new_logs:
156
+ for log in new_logs:
157
+ logger.info(log)
158
+ logger.info(f'{log_type} progress: {int(bootstrap_progress)}%')
159
+ if compute_id not in self.log_cache:
160
+ self.log_cache[compute_id] = {}
161
+ self.log_cache[compute_id][log_type] = bootstrap_logs
162
+ if bootstrap_progress in [100, None] and validation_progress is not None:
163
+ if 'validation' not in self.log_cache.get(compute_id, {}):
164
+ logger.info(f"Validating created compute:")
165
+ last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
166
+ new_logs = validation_logs[last_index:]
167
+ if new_logs:
168
+ for log in new_logs:
169
+ logger.info(log)
170
+ logger.info(f'Validation progress: {int(validation_progress)}%')
171
+ if compute_id not in self.log_cache:
172
+ self.log_cache[compute_id] = {}
173
+ self.log_cache[compute_id]['validation'] = validation_logs
174
+ return func
175
+
176
+ def get(self, compute_id: str, archived = False):
177
+ """
178
+ Get a compute
179
+
180
+ :param compute_id: Compute ID
181
+ :param archived: Archived
182
+ :return: Compute
183
+ :rtype: dl.entities.compute.Compute
184
+ """
185
+ url_path = self._base_url + '/{}'.format(compute_id)
186
+ params_to_add = {"archived": "true" if archived else "false" }
187
+ parsed_url = urlparse(url_path)
188
+ query_dict = parse_qs(parsed_url.query)
189
+ query_dict.update(params_to_add)
190
+ new_query = urlencode(query_dict, doseq=True)
191
+ url_path = urlunparse(parsed_url._replace(query=new_query))
192
+ # request
193
+ success, response = self._client_api.gen_request(
194
+ req_type='get',
195
+ path=url_path
196
+ )
197
+
198
+ if not success:
199
+ raise exceptions.PlatformException(response)
200
+
201
+ compute = self._build_compute_by_type(response.json())
202
+
203
+ return compute
204
+
205
+ def update(self, compute: entities.Compute):
206
+ """
207
+ Update a compute
208
+
209
+ :param compute: Compute
210
+ :return: Compute
211
+ :rtype: dl.entities.compute.Compute
212
+ """
213
+
214
+ # request
215
+ success, response = self._client_api.gen_request(
216
+ req_type='patch',
217
+ path=self._base_url + '/{}'.format(compute.id),
218
+ json_req=compute.to_json()
219
+ )
220
+
221
+ if not success:
222
+ raise exceptions.PlatformException(response)
223
+
224
+ compute = self._build_compute_by_type(response.json())
225
+
226
+ return compute
227
+
228
+ def delete(self, compute_id: str, skip_destroy: bool = False, wait: bool = True):
229
+ """
230
+ Delete a compute
231
+
232
+ :param compute_id: compute ID
233
+ :param skip_destroy: bool
234
+ :param bool wait: Wait for deletion
235
+ """
236
+ url_path = self._base_url + '/{}'.format(compute_id)
237
+ params_to_add = {"skipDestroy": "true" if skip_destroy else "false" }
238
+ parsed_url = urlparse(url_path)
239
+ query_dict = parse_qs(parsed_url.query)
240
+ query_dict.update(params_to_add)
241
+ new_query = urlencode(query_dict, doseq=True)
242
+ url_path = urlunparse(parsed_url._replace(query=new_query))
243
+ # request
244
+ success, response = self._client_api.gen_request(
245
+ req_type='delete',
246
+ path=url_path
247
+ )
248
+
249
+ if not success:
250
+ raise exceptions.PlatformException(response)
251
+ if skip_destroy is not True and wait is True:
252
+ command_response = response.json()
253
+ command_id = command_response['id']
254
+ command = self.commands.get(command_id, url='api/v1/commands/faas/{}'.format(command_id))
255
+ try:
256
+ callback = self.__get_log_compute_progress_callback(compute_id, is_destroy=True)
257
+ command.wait(iteration_callback=callback)
258
+ callback()
259
+ except Exception as e:
260
+ self.log_cache.pop(command_id, None)
261
+ raise e
262
+
263
+ return True
264
+
265
+ def validate(self, compute_id: str, wait: bool = True):
266
+ """
267
+ Validate a compute
268
+
269
+ :param str compute_id: Compute ID
270
+ :param bool wait: Wait for validation
271
+ :return: Compute
272
+ :rtype: dl.entities.compute.Compute
273
+ """
274
+
275
+ # request
276
+ success, response = self._client_api.gen_request(
277
+ req_type='post',
278
+ path=self._base_url + '/{}/validate'.format(compute_id)
279
+ )
280
+
281
+ if not success:
282
+ raise exceptions.PlatformException(response)
283
+
284
+ compute = self._build_compute_by_type(response.json())
285
+
286
+ if wait:
287
+ command_id = compute.metadata.get('system', {}).get('commands', {}).get('validate', None)
288
+ if command_id is not None:
289
+ command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
290
+ try:
291
+ callback = self.__get_log_compute_progress_callback(compute.id)
292
+ command.wait(iteration_callback=callback)
293
+ callback()
294
+ except Exception as e:
295
+ self.log_cache.pop(compute.id, None)
296
+ raise e
297
+ compute = self.get(compute_id=compute.id)
298
+
299
+ return compute
300
+
301
+ def list_global(self):
302
+ """
303
+ List computes
304
+
305
+ :return: List of computes
306
+ :rtype: list[str]
307
+ """
308
+
309
+ # request
310
+ success, response = self._client_api.gen_request(
311
+ req_type='get',
312
+ path=self._base_url + '/globals',
313
+ )
314
+
315
+ if not success:
316
+ raise exceptions.PlatformException(response)
317
+
318
+
319
+ return response.json()
320
+
321
+ @staticmethod
322
+ def read_file(file_path):
323
+ try:
324
+ with open(file_path, 'r') as file:
325
+ content = file.read()
326
+ return content
327
+ except FileNotFoundError:
328
+ print(f"The file at {file_path} was not found.")
329
+ except IOError:
330
+ print(f"An error occurred while reading the file at {file_path}.")
331
+
332
+ def decode_and_parse_input(self, file_path):
333
+ """Decode a base64 encoded string from file a and parse it as JSON."""
334
+ decoded_bytes = base64.b64decode(self.read_file(file_path))
335
+ return json.loads(decoded_bytes)
336
+
337
+ @staticmethod
338
+ def create_integration(org, name, auth_data):
339
+ """Create a new key-value integration within the specified project."""
340
+ return org.integrations.create(
341
+ integrations_type=IntegrationType.KEY_VALUE,
342
+ name=name,
343
+ options={
344
+ 'key': name,
345
+ 'value': json.dumps(auth_data)
346
+ }
347
+ )
348
+
349
+ def setup_compute_cluster(self, config, integration, org_id, project=None):
350
+ """Set up a compute cluster using the provided configuration and integration."""
351
+ cluster = ComputeCluster.from_setup_json(config, integration)
352
+ project_id = None
353
+ if project is not None:
354
+ project_id = project.id
355
+ compute = self.create(
356
+ config['config']['name'],
357
+ ComputeContext([], org_id, project_id),
358
+ [],
359
+ cluster,
360
+ ComputeType.KUBERNETES,
361
+ status=config['config'].get('status', None),
362
+ settings=config['config'].get('settings', None),
363
+ metadata=config['config'].get('metadata', None))
364
+
365
+ return compute
366
+
367
+ def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
368
+ config = self.decode_and_parse_input(config_file_path)
369
+ project = None
370
+ if project_name is not None:
371
+ project = self.projects.get(project_name=project_name)
372
+ org = self.organizations.get(organization_id=org_id)
373
+ integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
374
+ .replace(':', '_'))
375
+ integration = self.create_integration(org, integration_name, config['authentication'])
376
+ compute = self.setup_compute_cluster(config, integration, org_id, project)
377
+ return compute
378
+
379
+
380
+ def _list(self, filters: entities.Filters):
381
+ url = self._base_url + '/query'
382
+ success, response = self._client_api.gen_request(req_type='POST',
383
+ path=url,
384
+ json_req=filters.prepare())
385
+ if not success:
386
+ raise exceptions.PlatformException(response)
387
+
388
+ return response.json()
389
+
390
+ def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Compute]:
391
+ pool = self._client_api.thread_pools(pool_name='entity.create')
392
+ jobs = [None for _ in range(len(response_items))]
393
+ for i_item, item in enumerate(response_items):
394
+ jobs[i_item] = pool.submit(entities.Compute._protected_from_json,
395
+ **{'client_api': self._client_api,
396
+ '_json': item})
397
+ results = [j.result() for j in jobs]
398
+ _ = [logger.warning(r[1]) for r in results if r[0] is False]
399
+ items = miscellaneous.List([r[1] for r in results if r[0] is True])
400
+ return items
401
+
402
+ def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
403
+ """
404
+ List all services drivers
405
+
406
+ :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
407
+ :return: Paged entity
408
+ :rtype: dtlpy.entities.paged_entities.PagedEntities
409
+
410
+ **Example**:
411
+
412
+ .. code-block:: python
413
+
414
+ services = dl.service_drivers.list()
415
+ """
416
+ # default filters
417
+ if filters is None:
418
+ filters = entities.Filters(resource=entities.FiltersResource.COMPUTE)
419
+
420
+ if filters.resource != entities.FiltersResource.COMPUTE:
421
+ raise exceptions.PlatformException(
422
+ error='400',
423
+ message='Filters resource must to be FiltersResource.COMPUTE. Got: {!r}'.format(
424
+ filters.resource))
425
+
426
+ if not isinstance(filters, entities.Filters):
427
+ raise exceptions.PlatformException('400', 'Unknown filters type')
428
+
429
+ paged = entities.PagedEntities(items_repository=self,
430
+ filters=filters,
431
+ page_offset=filters.page,
432
+ page_size=filters.page_size,
433
+ client_api=self._client_api)
434
+ paged.get_page()
407
435
  return paged