dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
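To reproduce a comparison like this locally, the sketch below lists the added, removed, and content-changed members of the two wheels using only the Python standard library (a wheel is a plain zip archive). The wheel paths are assumptions — both files are presumed to have been downloaded beforehand, e.g. with `pip download dtlpy==<version>` — and a per-file line diff like the one on this page would still need a text diff on top of this index.

```python
import zipfile

# Assumed local paths: both wheels are presumed to have been downloaded
# from the registry beforehand (e.g. `pip download dtlpy==1.113.10`).
OLD_WHEEL = "dtlpy-1.113.10-py3-none-any.whl"
NEW_WHEEL = "dtlpy-1.114.13-py3-none-any.whl"


def wheel_index(path):
    """Map each archive member to its (uncompressed size, CRC32) pair."""
    with zipfile.ZipFile(path) as zf:
        return {info.filename: (info.file_size, info.CRC) for info in zf.infolist()}


old = wheel_index(OLD_WHEEL)
new = wheel_index(NEW_WHEEL)

added = sorted(set(new) - set(old))
removed = sorted(set(old) - set(new))
changed = sorted(name for name in set(old) & set(new) if old[name] != new[name])

print(f"{len(added)} added, {len(removed)} removed, {len(changed)} changed")
```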
Files changed (243)
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
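The version bump itself appears above as the one-line change to dtlpy/__version__.py. After upgrading, a quick way to confirm which build is actually installed — a minimal check, assuming Python 3.8+ so that importlib.metadata is available:

```python
import importlib.metadata

# Prints the installed distribution version, e.g. "1.114.13" after the upgrade.
print(importlib.metadata.version("dtlpy"))
```

Reading the distribution metadata avoids relying on the package re-exporting a `__version__` attribute.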
dtlpy/entities/service.py CHANGED
@@ -1,958 +1,958 @@
1
- import warnings
2
- from collections import namedtuple
3
- from enum import Enum
4
- import traceback
5
- import logging
6
- from typing import List
7
- from urllib.parse import urlsplit
8
- import attr
9
- from .. import repositories, entities
10
- from ..services.api_client import ApiClient
11
-
12
- logger = logging.getLogger(name='dtlpy')
13
-
14
-
15
- class ServiceType(str, Enum):
16
- """ The type of the service (SYSTEM).
17
-
18
- .. list-table::
19
- :widths: 15 150
20
- :header-rows: 1
21
-
22
- * - State
23
- - Description
24
- * - SYSTEM
25
- - Dataloop internal service
26
- """
27
- SYSTEM = 'system'
28
- REGULAR = 'regular'
29
-
30
-
31
- class ServiceModeType(str, Enum):
32
- """ The type of the service mode.
33
-
34
- .. list-table::
35
- :widths: 15 150
36
- :header-rows: 1
37
-
38
- * - State
39
- - Description
40
- * - REGULAR
41
- - Service regular mode type
42
- * - DEBUG
43
- - Service debug mode type
44
- """
45
- REGULAR = 'regular'
46
- DEBUG = 'debug'
47
-
48
-
49
- class OnResetAction(str, Enum):
50
- """ The Execution action when the service reset (RERUN, FAILED).
51
-
52
- .. list-table::
53
- :widths: 15 150
54
- :header-rows: 1
55
-
56
- * - State
57
- - Description
58
- * - RERUN
59
- - When the service resets, rerun the execution
60
- * - FAILED
61
- - When the service resets, fail the execution
62
- """
63
- RERUN = 'rerun'
64
- FAILED = 'failed'
65
-
66
-
67
- class InstanceCatalog(str, Enum):
68
- """ The Service Pode size.
69
-
70
- .. list-table::
71
- :widths: 15 150
72
- :header-rows: 1
73
-
74
- * - State
75
- - Description
76
- * - REGULAR_XS
77
- - regular pod with extra small size
78
- * - REGULAR_S
79
- - regular pod with small size
80
- * - REGULAR_M
81
- - regular pod with medium size
82
- * - REGULAR_L
83
- - regular pod with large size
84
- * - HIGHMEM_XS
85
- - highmem pod with extra small size
86
- * - HIGHMEM_S
87
- - highmem pod with small size
88
- * - HIGHMEM_M
89
- - highmem pod with medium size
90
- * - HIGHMEM_L
91
- - highmem pod with large size
92
- * - GPU_T4_S
93
- - GPU NVIDIA T4 pod with regular memory
94
- * - GPU_T4_M
95
- - GPU NVIDIA T4 pod with highmem
96
- """
97
- REGULAR_XS = "regular-xs"
98
- REGULAR_S = "regular-s"
99
- REGULAR_M = "regular-m"
100
- REGULAR_L = "regular-l"
101
- HIGHMEM_XS = "highmem-xs"
102
- HIGHMEM_S = "highmem-s"
103
- HIGHMEM_M = "highmem-m"
104
- HIGHMEM_L = "highmem-l"
105
- GPU_T4_S = "gpu-t4"
106
- GPU_T4_M = "gpu-t4-m"
107
-
108
-
109
- class RuntimeType(str, Enum):
110
- """ Service culture Runtime (KUBERNETES).
111
-
112
- .. list-table::
113
- :widths: 15 150
114
- :header-rows: 1
115
-
116
- * - State
117
- - Description
118
- * - KUBERNETES
119
- - Service runs on Kubernetes
120
- """
121
- KUBERNETES = 'kubernetes'
122
-
123
-
124
- class ServiceRuntime(entities.BaseEntity):
125
- def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
126
- self.service_type = service_type
127
-
128
-
129
- class KubernetesRuntime(ServiceRuntime):
130
- DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
131
- DEFAULT_NUM_REPLICAS = 1
132
- DEFAULT_CONCURRENCY = 10
133
-
134
- def __init__(self,
135
- pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
136
- num_replicas=DEFAULT_NUM_REPLICAS,
137
- concurrency=DEFAULT_CONCURRENCY,
138
- runner_image=None,
139
- autoscaler=None,
140
- **kwargs):
141
-
142
- super().__init__(service_type=RuntimeType.KUBERNETES)
143
- self.pod_type = kwargs.get('podType', pod_type)
144
- self.num_replicas = kwargs.get('numReplicas', num_replicas)
145
- self.concurrency = kwargs.get('concurrency', concurrency)
146
- self.runner_image = kwargs.get('runnerImage', runner_image)
147
- self._proxy_image = kwargs.get('proxyImage', None)
148
- self.single_agent = kwargs.get('singleAgent', None)
149
- self.preemptible = kwargs.get('preemptible', None)
150
-
151
- self.autoscaler = kwargs.get('autoscaler', autoscaler)
152
- if self.autoscaler is not None and isinstance(self.autoscaler, dict):
153
- if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
154
- self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
155
- elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
156
- self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
157
- else:
158
- raise NotImplementedError(
159
- 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
160
-
161
- def to_json(self):
162
- _json = {
163
- 'podType': self.pod_type,
164
- 'numReplicas': self.num_replicas,
165
- 'concurrency': self.concurrency,
166
- 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
167
- }
168
-
169
- if self.single_agent is not None:
170
- _json['singleAgent'] = self.single_agent
171
-
172
- if self.runner_image is not None:
173
- _json['runnerImage'] = self.runner_image
174
-
175
- if self._proxy_image is not None:
176
- _json['proxyImage'] = self._proxy_image
177
-
178
- if self.preemptible is not None:
179
- _json['preemptible'] = self.preemptible
180
-
181
- return _json
182
-
183
-
184
- @attr.s
185
- class Service(entities.BaseEntity):
186
- """
187
- Service object
188
- """
189
- # platform
190
- created_at = attr.ib()
191
- updated_at = attr.ib(repr=False)
192
- creator = attr.ib()
193
- version = attr.ib()
194
-
195
- package_id = attr.ib()
196
- package_revision = attr.ib()
197
-
198
- bot = attr.ib()
199
- use_user_jwt = attr.ib(repr=False)
200
- init_input = attr.ib()
201
- versions = attr.ib(repr=False)
202
- module_name = attr.ib()
203
- name = attr.ib()
204
- url = attr.ib()
205
- id = attr.ib()
206
- active = attr.ib()
207
- driver_id = attr.ib(repr=False)
208
- secrets = attr.ib(repr=False)
209
-
210
- # name change
211
- runtime = attr.ib(repr=False, type=KubernetesRuntime)
212
- queue_length_limit = attr.ib()
213
- run_execution_as_process = attr.ib(type=bool)
214
- execution_timeout = attr.ib()
215
- drain_time = attr.ib()
216
- on_reset = attr.ib(type=OnResetAction)
217
- _type = attr.ib(type=ServiceType)
218
- project_id = attr.ib()
219
- org_id = attr.ib()
220
- is_global = attr.ib()
221
- max_attempts = attr.ib()
222
- mode = attr.ib(repr=False)
223
- metadata = attr.ib()
224
- archive = attr.ib(repr=False)
225
- config = attr.ib(repr=False)
226
- settings = attr.ib(repr=False)
227
- panels = attr.ib(repr=False)
228
-
229
- # SDK
230
- _package = attr.ib(repr=False)
231
- _client_api = attr.ib(type=ApiClient, repr=False)
232
- _revisions = attr.ib(default=None, repr=False)
233
- # repositories
234
- _project = attr.ib(default=None, repr=False)
235
- _repositories = attr.ib(repr=False)
236
- updated_by = attr.ib(default=None)
237
- app = attr.ib(default=None)
238
- integrations = attr.ib(default=None)
239
-
240
- @property
241
- def createdAt(self):
242
- return self.created_at
243
-
244
- @property
245
- def updatedAt(self):
246
- return self.updated_at
247
-
248
- @staticmethod
249
- def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
250
- """
251
- Same as from_json, but wrapped in try-except to catch errors
252
-
253
- :param _json: platform json
254
- :param client_api: ApiClient entity
255
- :param package:
256
- :param project: project entity
257
- :param is_fetched: is Entity fetched from Platform
258
- :return:
259
- """
260
- try:
261
- service = Service.from_json(_json=_json,
262
- client_api=client_api,
263
- package=package,
264
- project=project,
265
- is_fetched=is_fetched)
266
- status = True
267
- except Exception:
268
- service = traceback.format_exc()
269
- status = False
270
- return status, service
271
-
272
- @classmethod
273
- def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
274
- """
275
- Build a service entity object from a json
276
-
277
- :param dict _json: platform json
278
- :param dl.ApiClient client_api: ApiClient entity
279
- :param dtlpy.entities.package.Package package: package entity
280
- :param dtlpy.entities.project.Project project: project entity
281
- :param bool is_fetched: is Entity fetched from Platform
282
- :return: service object
283
- :rtype: dtlpy.entities.service.Service
284
- """
285
- if project is not None:
286
- if project.id != _json.get('projectId', None):
287
- logger.warning('Service has been fetched from a project that does not belong to it')
288
- project = None
289
-
290
- if package is not None:
291
- if package.id != _json.get('packageId', None):
292
- logger.warning('Service has been fetched from a package that does not belong to it')
293
- package = None
294
-
295
- versions = _json.get('versions', dict())
296
- runtime = _json.get("runtime", None)
297
- if runtime:
298
- runtime = KubernetesRuntime(**runtime)
299
-
300
- inst = cls(
301
- package_revision=_json.get("packageRevision", None),
302
- bot=_json.get("botUserName", None),
303
- use_user_jwt=_json.get("useUserJwt", False),
304
- created_at=_json.get("createdAt", None),
305
- updated_at=_json.get("updatedAt", None),
306
- project_id=_json.get('projectId', None),
307
- package_id=_json.get('packageId', None),
308
- driver_id=_json.get('driverId', None),
309
- max_attempts=_json.get('maxAttempts', None),
310
- version=_json.get('version', None),
311
- creator=_json.get('creator', None),
312
- revisions=_json.get('revisions', None),
313
- queue_length_limit=_json.get('queueLengthLimit', None),
314
- active=_json.get('active', None),
315
- runtime=runtime,
316
- is_global=_json.get("global", False),
317
- init_input=_json.get("initParams", dict()),
318
- module_name=_json.get("moduleName", None),
319
- run_execution_as_process=_json.get('runExecutionAsProcess', False),
320
- execution_timeout=_json.get('executionTimeout', 60 * 60),
321
- drain_time=_json.get('drainTime', 60 * 10),
322
- on_reset=_json.get('onReset', OnResetAction.FAILED),
323
- name=_json.get("name", None),
324
- url=_json.get("url", None),
325
- id=_json.get("id", None),
326
- versions=versions,
327
- client_api=client_api,
328
- package=package,
329
- project=project,
330
- secrets=_json.get("secrets", None),
331
- type=_json.get("type", None),
332
- mode=_json.get('mode', dict()),
333
- metadata=_json.get('metadata', None),
334
- archive=_json.get('archive', None),
335
- updated_by=_json.get('updatedBy', None),
336
- config=_json.get('config', None),
337
- settings=_json.get('settings', None),
338
- app=_json.get('app', None),
339
- integrations=_json.get('integrations', None),
340
- org_id=_json.get('orgId', None),
341
- panels=_json.get('panels', None)
342
- )
343
- inst.is_fetched = is_fetched
344
- return inst
345
-
346
- ############
347
- # Entities #
348
- ############
349
- @property
350
- def revisions(self):
351
- if self._revisions is None:
352
- self._revisions = self.services.revisions(service=self)
353
- return self._revisions
354
-
355
- @property
356
- def platform_url(self):
357
- return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
358
-
359
- @property
360
- def project(self):
361
- if self._project is None:
362
- self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
363
- fetch=None)
364
- assert isinstance(self._project, entities.Project)
365
- return self._project
366
-
367
- @property
368
- def package(self):
369
- if self._package is None:
370
- try:
371
- dpk_id = None
372
- dpk_version = None
373
- if self.app and isinstance(self.app, dict):
374
- dpk_id = self.app.get('dpkId', None)
375
- dpk_version = self.app.get('dpkVersion', None)
376
- if dpk_id is None:
377
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
378
- dpk_id=self.package_id)
379
- else:
380
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
381
- dpk_id=dpk_id,
382
- version=dpk_version)
383
-
384
- assert isinstance(self._package, entities.Dpk)
385
- except:
386
- self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
387
- fetch=None,
388
- log_error=False)
389
- assert isinstance(self._package, entities.Package)
390
- return self._package
391
-
392
- @property
393
- def execution_url(self):
394
- return 'CURL -X POST' \
395
- '\nauthorization: Bearer <token>' \
396
- '\nContent-Type: application/json" -d {' \
397
- '\n"input": {<input json>}, ' \
398
- '"projectId": "{<project_id>}", ' \
399
- '"functionName": "<function_name>"}'
400
-
401
- ################
402
- # repositories #
403
- ################
404
- @_repositories.default
405
- def set_repositories(self):
406
- reps = namedtuple('repositories',
407
- field_names=['executions', 'services', 'triggers'])
408
-
409
- if self._package is None:
410
- services_repo = repositories.Services(client_api=self._client_api,
411
- package=self._package,
412
- project=self._project)
413
- else:
414
- services_repo = self._package.services
415
-
416
- triggers = repositories.Triggers(client_api=self._client_api,
417
- project=self._project,
418
- service=self)
419
-
420
- r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
421
- services=services_repo, triggers=triggers)
422
- return r
423
-
424
- @property
425
- def executions(self):
426
- assert isinstance(self._repositories.executions, repositories.Executions)
427
- return self._repositories.executions
428
-
429
- @property
430
- def triggers(self):
431
- assert isinstance(self._repositories.triggers, repositories.Triggers)
432
- return self._repositories.triggers
433
-
434
- @property
435
- def services(self):
436
- assert isinstance(self._repositories.services, repositories.Services)
437
- return self._repositories.services
438
-
439
- ###########
440
- # methods #
441
- ###########
442
- def to_json(self):
443
- """
444
- Returns platform _json format of object
445
-
446
- :return: platform json format of object
447
- :rtype: dict
448
- """
449
- _json = attr.asdict(
450
- self,
451
- filter=attr.filters.exclude(
452
- attr.fields(Service)._project,
453
- attr.fields(Service)._package,
454
- attr.fields(Service)._revisions,
455
- attr.fields(Service)._client_api,
456
- attr.fields(Service)._repositories,
457
- attr.fields(Service).project_id,
458
- attr.fields(Service).init_input,
459
- attr.fields(Service).module_name,
460
- attr.fields(Service).bot,
461
- attr.fields(Service).package_id,
462
- attr.fields(Service).is_global,
463
- attr.fields(Service).use_user_jwt,
464
- attr.fields(Service).package_revision,
465
- attr.fields(Service).driver_id,
466
- attr.fields(Service).run_execution_as_process,
467
- attr.fields(Service).execution_timeout,
468
- attr.fields(Service).drain_time,
469
- attr.fields(Service).runtime,
470
- attr.fields(Service).queue_length_limit,
471
- attr.fields(Service).max_attempts,
472
- attr.fields(Service).on_reset,
473
- attr.fields(Service).created_at,
474
- attr.fields(Service).updated_at,
475
- attr.fields(Service).secrets,
476
- attr.fields(Service)._type,
477
- attr.fields(Service).mode,
478
- attr.fields(Service).metadata,
479
- attr.fields(Service).archive,
480
- attr.fields(Service).updated_by,
481
- attr.fields(Service).config,
482
- attr.fields(Service).settings,
483
- attr.fields(Service).app,
484
- attr.fields(Service).integrations,
485
- attr.fields(Service).org_id,
486
- attr.fields(Service).panels
487
- )
488
- )
489
-
490
- _json['projectId'] = self.project_id
491
- _json['orgId'] = self.org_id
492
- _json['packageId'] = self.package_id
493
- _json['initParams'] = self.init_input
494
- _json['moduleName'] = self.module_name
495
- _json['botUserName'] = self.bot
496
- _json['useUserJwt'] = self.use_user_jwt
497
- _json['global'] = self.is_global
498
- _json['driverId'] = self.driver_id
499
- _json['packageRevision'] = self.package_revision
500
- _json['runExecutionAsProcess'] = self.run_execution_as_process
501
- _json['executionTimeout'] = self.execution_timeout
502
- _json['drainTime'] = self.drain_time
503
- _json['onReset'] = self.on_reset
504
- _json['createdAt'] = self.created_at
505
- _json['updatedAt'] = self.updated_at
506
-
507
- if self.updated_by is not None:
508
- _json['updatedBy'] = self.updated_by
509
-
510
- if self.panels is not None:
511
- _json['panels'] = self.panels
512
-
513
- if self.max_attempts is not None:
514
- _json['maxAttempts'] = self.max_attempts
515
-
516
- if self.is_global is not None:
517
- _json['global'] = self.is_global
518
-
519
- if self.runtime:
520
- _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
521
-
522
- if self.queue_length_limit is not None:
523
- _json['queueLengthLimit'] = self.queue_length_limit
524
-
525
- if self.secrets is not None:
526
- _json['secrets'] = self.secrets
527
-
528
- if self._type is not None:
529
- _json['type'] = self._type
530
-
531
- if self.mode:
532
- _json['mode'] = self.mode
533
-
534
- if self.metadata:
535
- _json['metadata'] = self.metadata
536
-
537
- if self.archive is not None:
538
- _json['archive'] = self.archive
539
-
540
- if self.config is not None:
541
- _json['config'] = self.config
542
-
543
- if self.settings is not None:
544
- _json['settings'] = self.settings
545
-
546
- if self.app is not None:
547
- _json['app'] = self.app
548
-
549
- if self.integrations is not None:
550
- _json['integrations'] = self.integrations
551
-
552
- return _json
553
-
554
- def update(self, force=False):
555
- """
556
- Update Service changes to platform
557
-
558
- :param bool force: force update
559
- :return: Service entity
560
- :rtype: dtlpy.entities.service.Service
561
- """
562
- return self.services.update(service=self, force=force)
563
-
564
- def delete(self, force: bool = False):
565
- """
566
- Delete Service object
567
-
568
- :return: True
569
- :rtype: bool
570
- """
571
- return self.services.delete(service_id=self.id, force=force)
572
-
573
- def status(self):
574
- """
575
- Get Service status
576
-
577
- :return: status json
578
- :rtype: dict
579
- """
580
- return self.services.status(service_id=self.id)
581
-
582
- def log(self,
583
- size=None,
584
- checkpoint=None,
585
- start=None,
586
- end=None,
587
- follow=False,
588
- text=None,
589
- execution_id=None,
590
- function_name=None,
591
- replica_id=None,
592
- system=False,
593
- view=True,
594
- until_completed=True,
595
- model_id: str = None,
596
- model_operation: str = None,
597
- ):
598
- """
599
- Get service logs
600
-
601
- :param int size: size
602
- :param dict checkpoint: the information from the last point checked in the service
603
- :param str start: iso format time
604
- :param str end: iso format time
605
- :param bool follow: if true, keep stream future logs
606
- :param str text: text
607
- :param str execution_id: execution id
608
- :param str function_name: function name
609
- :param str replica_id: replica id
610
- :param bool system: system
611
- :param bool view: if true, print out all the logs
612
- :param bool until_completed: wait until completed
613
- :param str model_id: model id
614
- :param str model_operation: model operation action
615
- :return: ServiceLog entity
616
- :rtype: ServiceLog
617
-
618
- **Example**:
619
-
620
- .. code-block:: python
621
-
622
- service_log = service.log()
623
- """
624
- return self.services.log(service=self,
625
- size=size,
626
- checkpoint=checkpoint,
627
- start=start,
628
- end=end,
629
- follow=follow,
630
- execution_id=execution_id,
631
- function_name=function_name,
632
- replica_id=replica_id,
633
- system=system,
634
- text=text,
635
- view=view,
636
- until_completed=until_completed,
637
- model_id=model_id,
638
- model_operation=model_operation)
639
-
640
- def open_in_web(self):
641
- """
642
- Open the service in web platform
643
-
644
- :return:
645
- """
646
- parsed_url = urlsplit(self.platform_url)
647
- base_url = parsed_url.scheme + "://" + parsed_url.netloc
648
- url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
649
- self._client_api._open_in_web(url=url)
650
-
651
- def checkout(self):
652
- """
653
- Checkout
654
-
655
- :return:
656
- """
657
- return self.services.checkout(service=self)
658
-
659
- def pause(self):
660
- """
661
- pause
662
-
663
- :return:
664
- """
665
- return self.services.pause(service_id=self.id)
666
-
667
- def resume(self):
668
- """
669
- resume
670
-
671
- :return:
672
- """
673
- return self.services.resume(service_id=self.id)
674
-
675
- def execute(
676
- self,
677
- execution_input=None,
678
- function_name=None,
679
- resource=None,
680
- item_id=None,
681
- dataset_id=None,
682
- annotation_id=None,
683
- project_id=None,
684
- sync=False,
685
- stream_logs=True,
686
- return_output=True
687
- ):
688
- """
689
- Execute a function on an existing service
690
-
691
- :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
692
- :param str function_name: function name to run
693
- :param str resource: input type.
694
- :param str item_id: optional - item id as input to function
695
- :param str dataset_id: optional - dataset id as input to function
696
- :param str annotation_id: optional - annotation id as input to function
697
- :param str project_id: resource's project
698
- :param bool sync: if true, wait for function to end
699
- :param bool stream_logs: prints logs of the new execution. only works with sync=True
700
- :param bool return_output: if True and sync is True - will return the output directly
701
- :return: execution object
702
- :rtype: dtlpy.entities.execution.Execution
703
-
704
- **Example**:
705
-
706
- .. code-block:: python
707
-
708
- execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
709
- """
710
- execution = self.executions.create(sync=sync,
711
- execution_input=execution_input,
712
- function_name=function_name,
713
- resource=resource,
714
- item_id=item_id,
715
- dataset_id=dataset_id,
716
- annotation_id=annotation_id,
717
- stream_logs=stream_logs,
718
- project_id=project_id,
719
- return_output=return_output)
720
- return execution
721
-
722
- def execute_batch(self,
723
- filters,
724
- function_name: str = None,
725
- execution_inputs: list = None,
726
- wait=True
727
- ):
728
- """
729
- Execute a function on an existing service
730
-
731
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
732
-
733
- :param filters: Filters entity for a filtering before execute
734
- :param str function_name: function name to run
735
- :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
736
- :param bool wait: wait until create task finish
737
- :return: execution object
738
- :rtype: dtlpy.entities.execution.Execution
739
-
740
- **Example**:
741
-
742
- .. code-block:: python
743
-
744
- command = service.execute_batch(
745
- execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
746
- filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]}),
747
- function_name='run')
748
- """
749
- execution = self.executions.create_batch(service_id=self.id,
750
- execution_inputs=execution_inputs,
751
- filters=filters,
752
- function_name=function_name,
753
- wait=wait)
754
- return execution
755
-
756
- def rerun_batch(self,
757
- filters,
758
- wait=True
759
- ):
760
- """
761
- Rerun executions on an existing service
762
-
763
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
764
-
765
- :param filters: Filters entity for a filtering before rerun
766
- :param bool wait: wait until create task finish
767
- :return: rerun command
768
- :rtype: dtlpy.entities.command.Command
769
-
770
- **Example**:
771
-
772
- .. code-block:: python
773
-
774
- command = service.executions.rerun_batch(
775
- filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
776
- """
777
- execution = self.executions.rerun_batch(service_id=self.id,
778
- filters=filters,
779
- wait=wait)
780
- return execution
781
-
782
- def activate_slots(
783
- self,
784
- project_id: str = None,
785
- task_id: str = None,
786
- dataset_id: str = None,
787
- org_id: str = None,
788
- user_email: str = None,
789
- slots=None,
790
- role=None,
791
- prevent_override: bool = True,
792
- visible: bool = True,
793
- icon: str = 'fas fa-magic',
794
- **kwargs
795
- ) -> object:
796
- """
797
- Activate service slots
798
-
799
- :param str project_id: project id
800
- :param str task_id: task id
801
- :param str dataset_id: dataset id
802
- :param str org_id: org id
803
- :param str user_email: user email
804
- :param list slots: list of entities.PackageSlot
805
- :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.owner, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
806
- :param bool prevent_override: True to prevent override
807
- :param bool visible: visible
808
- :param str icon: icon
809
- :param kwargs: all additional arguments
810
- :return: list of user setting for activated slots
811
- :rtype: list
812
-
813
- **Example**:
814
-
815
- .. code-block:: python
816
-
817
- setting = service.activate_slots(project_id='project_id',
818
- slots=List[entities.PackageSlot],
819
- icon='fas fa-magic')
820
- """
821
- return self.services.activate_slots(
822
- service=self,
823
- project_id=project_id,
824
- task_id=task_id,
825
- dataset_id=dataset_id,
826
- org_id=org_id,
827
- user_email=user_email,
828
- slots=slots,
829
- role=role,
830
- prevent_override=prevent_override,
831
- visible=visible,
832
- icon=icon,
833
- **kwargs
834
- )
835
-
836
- def restart(self, replica_name: str = None):
837
- """
838
- Restart service
839
-
840
- :param str replica_name: replica name
841
- :return: True
842
- :rtype: bool
843
- """
844
- return self.services.restart(service=self, replica_name=replica_name)
845
-
846
-
847
- class KubernetesAutoscalerType(str, Enum):
848
- """ The Service Autoscaler Type (RABBITMQ, CPU).
849
-
850
- .. list-table::
851
- :widths: 15 150
852
- :header-rows: 1
853
-
854
- * - State
855
- - Description
856
- * - RABBITMQ
857
- - Service Autoscaler based on service queue length
858
- * - CPU
859
- - Service Autoscaler based on service CPU usage
860
- * - RPS
861
- - Service Autoscaler based on service RPS
862
- """
863
- RABBITMQ = 'rabbitmq'
864
- CPU = 'cpu'
865
- RPS = 'rps'
866
-
867
-
868
- # added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
869
- class KubernetesAutuscalerTypeMeta(type):
870
- def __getattribute__(cls, item):
871
- if hasattr(KubernetesAutoscalerType, item):
872
- return getattr(KubernetesAutoscalerType, item)
873
- else:
874
- raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
875
-
876
-
877
- class KubernetesAutoscaler(entities.BaseEntity):
878
- MIN_REPLICA_DEFAULT = 0
879
- MAX_REPLICA_DEFAULT = 1
880
- AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
881
-
882
- def __init__(self,
883
- autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
884
- min_replicas=MIN_REPLICA_DEFAULT,
885
- max_replicas=MAX_REPLICA_DEFAULT,
886
- cooldown_period=None,
887
- polling_interval=None,
888
- **kwargs):
889
- self.autoscaler_type = kwargs.get('type', autoscaler_type)
890
- self.min_replicas = kwargs.get('minReplicas', min_replicas)
891
- self.max_replicas = kwargs.get('maxReplicas', max_replicas)
892
- self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
893
- self.polling_interval = kwargs.get('pollingInterval', polling_interval)
894
-
895
- def to_json(self):
896
- _json = {
897
- 'type': self.autoscaler_type,
898
- 'minReplicas': self.min_replicas,
899
- 'maxReplicas': self.max_replicas
900
- }
901
-
902
- if self.cooldown_period is not None:
903
- _json['cooldownPeriod'] = self.cooldown_period
904
-
905
- if self.polling_interval is not None:
906
- _json['pollingInterval'] = self.polling_interval
907
-
908
- return _json
909
-
910
-
911
- class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
912
- QUEUE_LENGTH_DEFAULT = 1000
913
-
914
- def __init__(self,
915
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
916
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
917
- queue_length=QUEUE_LENGTH_DEFAULT,
918
- cooldown_period=None,
919
- polling_interval=None,
920
- **kwargs):
921
- super().__init__(min_replicas=min_replicas,
922
- max_replicas=max_replicas,
923
- autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
924
- cooldown_period=cooldown_period,
925
- polling_interval=polling_interval, **kwargs)
926
- self.queue_length = kwargs.get('queueLength', queue_length)
927
-
928
- def to_json(self):
929
- _json = super().to_json()
930
- _json['queueLength'] = self.queue_length
931
- return _json
932
-
933
-
934
- class KubernetesRPSAutoscaler(KubernetesAutoscaler):
935
- THRESHOLD_DEFAULT = 10
936
- RATE_SECONDS_DEFAULT = 30
937
-
938
- def __init__(self,
939
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
940
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
941
- threshold=THRESHOLD_DEFAULT,
942
- rate_seconds=RATE_SECONDS_DEFAULT,
943
- cooldown_period=None,
944
- polling_interval=None,
945
- **kwargs):
946
- super().__init__(min_replicas=min_replicas,
947
- max_replicas=max_replicas,
948
- autoscaler_type=KubernetesAutoscalerType.RPS,
949
- cooldown_period=cooldown_period,
950
- polling_interval=polling_interval, **kwargs)
951
- self.threshold = kwargs.get('threshold', threshold)
952
- self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
953
-
954
- def to_json(self):
955
- _json = super().to_json()
956
- _json['rateSeconds'] = self.rate_seconds
957
- _json['threshold'] = self.threshold
958
- return _json
1
+ import warnings
2
+ from collections import namedtuple
3
+ from enum import Enum
4
+ import traceback
5
+ import logging
6
+ from typing import List
7
+ from urllib.parse import urlsplit
8
+ import attr
9
+ from .. import repositories, entities
10
+ from ..services.api_client import ApiClient
11
+
12
+ logger = logging.getLogger(name='dtlpy')
13
+
14
+
15
+ class ServiceType(str, Enum):
16
+ """ The type of the service (SYSTEM).
17
+
18
+ .. list-table::
19
+ :widths: 15 150
20
+ :header-rows: 1
21
+
22
+ * - State
23
+ - Description
24
+ * - SYSTEM
25
+ - Dataloop internal service
26
+ """
27
+ SYSTEM = 'system'
28
+ REGULAR = 'regular'
29
+
30
+
31
+ class ServiceModeType(str, Enum):
32
+ """ The type of the service mode.
33
+
34
+ .. list-table::
35
+ :widths: 15 150
36
+ :header-rows: 1
37
+
38
+ * - State
39
+ - Description
40
+ * - REGULAR
41
+ - Service regular mode type
42
+ * - DEBUG
43
+ - Service debug mode type
44
+ """
45
+ REGULAR = 'regular'
46
+ DEBUG = 'debug'
47
+
48
+
49
+ class OnResetAction(str, Enum):
50
+ """ The Execution action when the service reset (RERUN, FAILED).
51
+
52
+ .. list-table::
53
+ :widths: 15 150
54
+ :header-rows: 1
55
+
56
+ * - State
57
+ - Description
58
+ * - RERUN
59
+ - When the service resets, rerun the execution
60
+ * - FAILED
61
+ - When the service resets, fail the execution
62
+ """
63
+ RERUN = 'rerun'
64
+ FAILED = 'failed'
65
+
66
+
67
+ class InstanceCatalog(str, Enum):
68
+ """ The Service Pode size.
69
+
70
+ .. list-table::
71
+ :widths: 15 150
72
+ :header-rows: 1
73
+
74
+ * - State
75
+ - Description
76
+ * - REGULAR_XS
77
+ - regular pod with extra small size
78
+ * - REGULAR_S
79
+ - regular pod with small size
80
+ * - REGULAR_M
81
+ - regular pod with medium size
82
+ * - REGULAR_L
83
+ - regular pod with large size
84
+ * - HIGHMEM_XS
85
+ - highmem pod with extra small size
86
+ * - HIGHMEM_S
87
+ - highmem pod with small size
88
+ * - HIGHMEM_M
89
+ - highmem pod with medium size
90
+ * - HIGHMEM_L
91
+ - highmem pod with large size
92
+ * - GPU_T4_S
93
+ - GPU NVIDIA T4 pod with regular memory
94
+ * - GPU_T4_M
95
+ - GPU NVIDIA T4 pod with highmem
96
+ """
97
+ REGULAR_XS = "regular-xs"
98
+ REGULAR_S = "regular-s"
99
+ REGULAR_M = "regular-m"
100
+ REGULAR_L = "regular-l"
101
+ HIGHMEM_XS = "highmem-xs"
102
+ HIGHMEM_S = "highmem-s"
103
+ HIGHMEM_M = "highmem-m"
104
+ HIGHMEM_L = "highmem-l"
105
+ GPU_T4_S = "gpu-t4"
106
+ GPU_T4_M = "gpu-t4-m"
107
+
108
+
109
+ class RuntimeType(str, Enum):
110
+ """ Service culture Runtime (KUBERNETES).
111
+
112
+ .. list-table::
113
+ :widths: 15 150
114
+ :header-rows: 1
115
+
116
+ * - State
117
+ - Description
118
+ * - KUBERNETES
119
+ - Service runs on Kubernetes
120
+ """
121
+ KUBERNETES = 'kubernetes'
122
+
123
+
124
+ class ServiceRuntime(entities.BaseEntity):
125
+ def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
126
+ self.service_type = service_type
127
+
128
+
129
+ class KubernetesRuntime(ServiceRuntime):
130
+ DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
131
+ DEFAULT_NUM_REPLICAS = 1
132
+ DEFAULT_CONCURRENCY = 10
133
+
134
+ def __init__(self,
135
+ pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
136
+ num_replicas=DEFAULT_NUM_REPLICAS,
137
+ concurrency=DEFAULT_CONCURRENCY,
138
+ runner_image=None,
139
+ autoscaler=None,
140
+ **kwargs):
141
+
142
+ super().__init__(service_type=RuntimeType.KUBERNETES)
143
+ self.pod_type = kwargs.get('podType', pod_type)
144
+ self.num_replicas = kwargs.get('numReplicas', num_replicas)
145
+ self.concurrency = kwargs.get('concurrency', concurrency)
146
+ self.runner_image = kwargs.get('runnerImage', runner_image)
147
+ self._proxy_image = kwargs.get('proxyImage', None)
148
+ self.single_agent = kwargs.get('singleAgent', None)
149
+ self.preemptible = kwargs.get('preemptible', None)
150
+
151
+ self.autoscaler = kwargs.get('autoscaler', autoscaler)
152
+ if self.autoscaler is not None and isinstance(self.autoscaler, dict):
153
+ if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
154
+ self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
155
+ elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
156
+ self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
157
+ else:
158
+ raise NotImplementedError(
159
+ 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
160
+
161
+ def to_json(self):
162
+ _json = {
163
+ 'podType': self.pod_type,
164
+ 'numReplicas': self.num_replicas,
165
+ 'concurrency': self.concurrency,
166
+ 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
167
+ }
168
+
169
+ if self.single_agent is not None:
170
+ _json['singleAgent'] = self.single_agent
171
+
172
+ if self.runner_image is not None:
173
+ _json['runnerImage'] = self.runner_image
174
+
175
+ if self._proxy_image is not None:
176
+ _json['proxyImage'] = self._proxy_image
177
+
178
+ if self.preemptible is not None:
179
+ _json['preemptible'] = self.preemptible
180
+
181
+ return _json
182
+
183
+
184
+ @attr.s
185
+ class Service(entities.BaseEntity):
186
+ """
187
+ Service object
188
+ """
189
+ # platform
190
+ created_at = attr.ib()
191
+ updated_at = attr.ib(repr=False)
192
+ creator = attr.ib()
193
+ version = attr.ib()
194
+
195
+ package_id = attr.ib()
196
+ package_revision = attr.ib()
197
+
198
+ bot = attr.ib()
199
+ use_user_jwt = attr.ib(repr=False)
200
+ init_input = attr.ib()
201
+ versions = attr.ib(repr=False)
202
+ module_name = attr.ib()
203
+ name = attr.ib()
204
+ url = attr.ib()
205
+ id = attr.ib()
206
+ active = attr.ib()
207
+ driver_id = attr.ib(repr=False)
208
+ secrets = attr.ib(repr=False)
209
+
210
+ # name change
211
+ runtime = attr.ib(repr=False, type=KubernetesRuntime)
212
+ queue_length_limit = attr.ib()
213
+ run_execution_as_process = attr.ib(type=bool)
214
+ execution_timeout = attr.ib()
215
+ drain_time = attr.ib()
216
+ on_reset = attr.ib(type=OnResetAction)
217
+ _type = attr.ib(type=ServiceType)
218
+ project_id = attr.ib()
219
+ org_id = attr.ib()
220
+ is_global = attr.ib()
221
+ max_attempts = attr.ib()
222
+ mode = attr.ib(repr=False)
223
+ metadata = attr.ib()
224
+ archive = attr.ib(repr=False)
225
+ config = attr.ib(repr=False)
226
+ settings = attr.ib(repr=False)
227
+ panels = attr.ib(repr=False)
228
+
229
+ # SDK
230
+ _package = attr.ib(repr=False)
231
+ _client_api = attr.ib(type=ApiClient, repr=False)
232
+ _revisions = attr.ib(default=None, repr=False)
233
+ # repositories
234
+ _project = attr.ib(default=None, repr=False)
235
+ _repositories = attr.ib(repr=False)
236
+ updated_by = attr.ib(default=None)
237
+ app = attr.ib(default=None)
238
+ integrations = attr.ib(default=None)
239
+
240
+ @property
241
+ def createdAt(self):
242
+ return self.created_at
243
+
244
+ @property
245
+ def updatedAt(self):
246
+ return self.updated_at
247
+
248
+ @staticmethod
249
+ def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
250
+ """
251
+ Same as from_json, but wrapped in try-except to catch errors
252
+
253
+ :param _json: platform json
254
+ :param client_api: ApiClient entity
255
+ :param package:
256
+ :param project: project entity
257
+ :param is_fetched: is Entity fetched from Platform
258
+ :return:
259
+ """
260
+ try:
261
+ service = Service.from_json(_json=_json,
262
+ client_api=client_api,
263
+ package=package,
264
+ project=project,
265
+ is_fetched=is_fetched)
266
+ status = True
267
+ except Exception:
268
+ service = traceback.format_exc()
269
+ status = False
270
+ return status, service
271
+
272
+ @classmethod
273
+ def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
274
+ """
275
+ Build a service entity object from a json
276
+
277
+ :param dict _json: platform json
278
+ :param dl.ApiClient client_api: ApiClient entity
279
+ :param dtlpy.entities.package.Package package: package entity
280
+ :param dtlpy.entities.project.Project project: project entity
281
+ :param bool is_fetched: is Entity fetched from Platform
282
+ :return: service object
283
+ :rtype: dtlpy.entities.service.Service
284
+ """
285
+ if project is not None:
286
+ if project.id != _json.get('projectId', None):
287
+ logger.warning('Service has been fetched from a project that does not belong to it')
288
+ project = None
289
+
290
+ if package is not None:
291
+ if package.id != _json.get('packageId', None):
292
+ logger.warning('Service has been fetched from a package that does not belong to it')
293
+ package = None
294
+
295
+ versions = _json.get('versions', dict())
296
+ runtime = _json.get("runtime", None)
297
+ if runtime:
298
+ runtime = KubernetesRuntime(**runtime)
299
+
300
+ inst = cls(
301
+ package_revision=_json.get("packageRevision", None),
302
+ bot=_json.get("botUserName", None),
303
+ use_user_jwt=_json.get("useUserJwt", False),
304
+ created_at=_json.get("createdAt", None),
305
+ updated_at=_json.get("updatedAt", None),
306
+ project_id=_json.get('projectId', None),
307
+ package_id=_json.get('packageId', None),
308
+ driver_id=_json.get('driverId', None),
309
+ max_attempts=_json.get('maxAttempts', None),
310
+ version=_json.get('version', None),
311
+ creator=_json.get('creator', None),
312
+ revisions=_json.get('revisions', None),
313
+ queue_length_limit=_json.get('queueLengthLimit', None),
314
+ active=_json.get('active', None),
315
+ runtime=runtime,
316
+ is_global=_json.get("global", False),
317
+ init_input=_json.get("initParams", dict()),
318
+ module_name=_json.get("moduleName", None),
319
+ run_execution_as_process=_json.get('runExecutionAsProcess', False),
320
+ execution_timeout=_json.get('executionTimeout', 60 * 60),
321
+ drain_time=_json.get('drainTime', 60 * 10),
322
+ on_reset=_json.get('onReset', OnResetAction.FAILED),
323
+ name=_json.get("name", None),
324
+ url=_json.get("url", None),
325
+ id=_json.get("id", None),
326
+ versions=versions,
327
+ client_api=client_api,
328
+ package=package,
329
+ project=project,
330
+ secrets=_json.get("secrets", None),
331
+ type=_json.get("type", None),
332
+ mode=_json.get('mode', dict()),
333
+ metadata=_json.get('metadata', None),
334
+ archive=_json.get('archive', None),
335
+ updated_by=_json.get('updatedBy', None),
336
+ config=_json.get('config', None),
337
+ settings=_json.get('settings', None),
338
+ app=_json.get('app', None),
339
+ integrations=_json.get('integrations', None),
340
+ org_id=_json.get('orgId', None),
341
+ panels=_json.get('panels', None)
342
+ )
343
+ inst.is_fetched = is_fetched
344
+ return inst
345
+
346
+ ############
347
+ # Entities #
348
+ ############
349
+ @property
350
+ def revisions(self):
351
+ if self._revisions is None:
352
+ self._revisions = self.services.revisions(service=self)
353
+ return self._revisions
354
+
355
+ @property
356
+ def platform_url(self):
357
+ return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
358
+
359
+ @property
360
+ def project(self):
361
+ if self._project is None:
362
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
363
+ fetch=None)
364
+ assert isinstance(self._project, entities.Project)
365
+ return self._project
366
+
367
+ @property
368
+ def package(self):
369
+ if self._package is None:
370
+ try:
371
+ dpk_id = None
372
+ dpk_version = None
373
+ if self.app and isinstance(self.app, dict):
374
+ dpk_id = self.app.get('dpkId', None)
375
+ dpk_version = self.app.get('dpkVersion', None)
376
+ if dpk_id is None:
377
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
378
+ dpk_id=self.package_id)
379
+ else:
380
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
381
+ dpk_id=dpk_id,
382
+ version=dpk_version)
383
+
384
+ assert isinstance(self._package, entities.Dpk)
385
+ except:
386
+ self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
387
+ fetch=None,
388
+ log_error=False)
389
+ assert isinstance(self._package, entities.Package)
390
+ return self._package
391
+
392
+ @property
393
+ def execution_url(self):
394
+ return 'CURL -X POST' \
395
+ '\nauthorization: Bearer <token>' \
396
+ '\nContent-Type: application/json" -d {' \
397
+ '\n"input": {<input json>}, ' \
398
+ '"projectId": "{<project_id>}", ' \
399
+ '"functionName": "<function_name>"}'
400
+
401
+ ################
402
+ # repositories #
403
+ ################
404
+ @_repositories.default
405
+ def set_repositories(self):
406
+ reps = namedtuple('repositories',
407
+ field_names=['executions', 'services', 'triggers'])
408
+
409
+ if self._package is None:
410
+ services_repo = repositories.Services(client_api=self._client_api,
411
+ package=self._package,
412
+ project=self._project)
413
+ else:
414
+ services_repo = self._package.services
415
+
416
+ triggers = repositories.Triggers(client_api=self._client_api,
417
+ project=self._project,
418
+ service=self)
419
+
420
+ r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
421
+ services=services_repo, triggers=triggers)
422
+ return r
423
+
424
+ @property
425
+ def executions(self):
426
+ assert isinstance(self._repositories.executions, repositories.Executions)
427
+ return self._repositories.executions
428
+
429
+ @property
430
+ def triggers(self):
431
+ assert isinstance(self._repositories.triggers, repositories.Triggers)
432
+ return self._repositories.triggers
433
+
434
+ @property
435
+ def services(self):
436
+ assert isinstance(self._repositories.services, repositories.Services)
437
+ return self._repositories.services
438
+
439
+ ###########
440
+ # methods #
441
+ ###########
442
+ def to_json(self):
443
+ """
444
+ Returns platform _json format of object
445
+
446
+ :return: platform json format of object
447
+ :rtype: dict
448
+ """
449
+ _json = attr.asdict(
450
+ self,
451
+ filter=attr.filters.exclude(
452
+ attr.fields(Service)._project,
453
+ attr.fields(Service)._package,
454
+ attr.fields(Service)._revisions,
455
+ attr.fields(Service)._client_api,
456
+ attr.fields(Service)._repositories,
457
+ attr.fields(Service).project_id,
458
+ attr.fields(Service).init_input,
459
+ attr.fields(Service).module_name,
460
+ attr.fields(Service).bot,
461
+ attr.fields(Service).package_id,
462
+ attr.fields(Service).is_global,
463
+ attr.fields(Service).use_user_jwt,
464
+ attr.fields(Service).package_revision,
465
+ attr.fields(Service).driver_id,
466
+ attr.fields(Service).run_execution_as_process,
467
+ attr.fields(Service).execution_timeout,
468
+ attr.fields(Service).drain_time,
469
+ attr.fields(Service).runtime,
470
+ attr.fields(Service).queue_length_limit,
471
+ attr.fields(Service).max_attempts,
472
+ attr.fields(Service).on_reset,
473
+ attr.fields(Service).created_at,
474
+ attr.fields(Service).updated_at,
475
+ attr.fields(Service).secrets,
476
+ attr.fields(Service)._type,
477
+ attr.fields(Service).mode,
478
+ attr.fields(Service).metadata,
479
+ attr.fields(Service).archive,
480
+ attr.fields(Service).updated_by,
481
+ attr.fields(Service).config,
482
+ attr.fields(Service).settings,
483
+ attr.fields(Service).app,
484
+ attr.fields(Service).integrations,
485
+ attr.fields(Service).org_id,
486
+ attr.fields(Service).panels
487
+ )
488
+ )
489
+
490
+ _json['projectId'] = self.project_id
491
+ _json['orgId'] = self.org_id
492
+ _json['packageId'] = self.package_id
493
+ _json['initParams'] = self.init_input
494
+ _json['moduleName'] = self.module_name
495
+ _json['botUserName'] = self.bot
496
+ _json['useUserJwt'] = self.use_user_jwt
497
+ _json['global'] = self.is_global
498
+ _json['driverId'] = self.driver_id
499
+ _json['packageRevision'] = self.package_revision
500
+ _json['runExecutionAsProcess'] = self.run_execution_as_process
501
+ _json['executionTimeout'] = self.execution_timeout
502
+ _json['drainTime'] = self.drain_time
503
+ _json['onReset'] = self.on_reset
504
+ _json['createdAt'] = self.created_at
505
+ _json['updatedAt'] = self.updated_at
506
+
507
+ if self.updated_by is not None:
508
+ _json['updatedBy'] = self.updated_by
509
+
510
+ if self.panels is not None:
511
+ _json['panels'] = self.panels
512
+
513
+ if self.max_attempts is not None:
514
+ _json['maxAttempts'] = self.max_attempts
515
+
516
+ if self.is_global is not None:
517
+ _json['global'] = self.is_global
518
+
519
+ if self.runtime:
520
+ _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
521
+
522
+ if self.queue_length_limit is not None:
523
+ _json['queueLengthLimit'] = self.queue_length_limit
524
+
525
+ if self.secrets is not None:
526
+ _json['secrets'] = self.secrets
527
+
528
+ if self._type is not None:
529
+ _json['type'] = self._type
530
+
531
+ if self.mode:
532
+ _json['mode'] = self.mode
533
+
534
+ if self.metadata:
535
+ _json['metadata'] = self.metadata
536
+
537
+ if self.archive is not None:
538
+ _json['archive'] = self.archive
539
+
540
+ if self.config is not None:
541
+ _json['config'] = self.config
542
+
543
+ if self.settings is not None:
544
+ _json['settings'] = self.settings
545
+
546
+ if self.app is not None:
547
+ _json['app'] = self.app
548
+
549
+ if self.integrations is not None:
550
+ _json['integrations'] = self.integrations
551
+
552
+ return _json
553
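to_json() maps the entity's snake_case attributes onto the camelCase keys the platform expects and only emits the optional keys that are actually set. A small illustration:

.. code-block:: python

    payload = service.to_json()
    print(payload['packageId'], payload['moduleName'], payload['global'])
    # 'runtime' is present only when set, either as a dict or via runtime.to_json()
    print(payload.get('runtime'))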
+
554
+ def update(self, force=False):
555
+ """
556
+ Update Service changes to platform
557
+
558
+ :param bool force: force update
559
+ :return: Service entity
560
+ :rtype: dtlpy.entities.service.Service
561
+ """
562
+ return self.services.update(service=self, force=force)
563
+
564
+ def delete(self, force: bool = False):
565
+ """
566
+ Delete Service object
567
+
568
+ :param bool force: force delete
+ :return: True
569
+ :rtype: bool
570
+ """
571
+ return self.services.delete(service_id=self.id, force=force)
572
+
573
+ def status(self):
574
+ """
575
+ Get Service status
576
+
577
+ :return: status json
578
+ :rtype: dict
579
+ """
580
+ return self.services.status(service_id=self.id)
581
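A typical lifecycle with the three methods above: change a field locally, push it with update(), then poll status(). The concurrency tweak below assumes the runtime was loaded as a KubernetesRuntime object rather than a plain dict (to_json above handles both forms):

.. code-block:: python

    service.runtime.concurrency = 5       # assumption: runtime is a KubernetesRuntime
    service = service.update(force=True)  # push the change; force is passed through to Services.update
    print(service.status())               # platform status json for the service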
+
582
+ def log(self,
583
+ size=None,
584
+ checkpoint=None,
585
+ start=None,
586
+ end=None,
587
+ follow=False,
588
+ text=None,
589
+ execution_id=None,
590
+ function_name=None,
591
+ replica_id=None,
592
+ system=False,
593
+ view=True,
594
+ until_completed=True,
595
+ model_id: str = None,
596
+ model_operation: str = None,
597
+ ):
598
+ """
599
+ Get service logs
600
+
601
+ :param int size: size
602
+ :param dict checkpoint: the information from the last point checked in the service
603
+ :param str start: iso format time
604
+ :param str end: iso format time
605
+ :param bool follow: if true, keep streaming future logs
606
+ :param str text: text
607
+ :param str execution_id: execution id
608
+ :param str function_name: function name
609
+ :param str replica_id: replica id
610
+ :param bool system: system
611
+ :param bool view: if true, print out all the logs
612
+ :param bool until_completed: wait until completed
613
+ :param str model_id: model id
614
+ :param str model_operation: model operation action
615
+ :return: ServiceLog entity
616
+ :rtype: ServiceLog
617
+
618
+ **Example**:
619
+
620
+ .. code-block:: python
621
+
622
+ service_log = service.log()
623
+ """
624
+ return self.services.log(service=self,
625
+ size=size,
626
+ checkpoint=checkpoint,
627
+ start=start,
628
+ end=end,
629
+ follow=follow,
630
+ execution_id=execution_id,
631
+ function_name=function_name,
632
+ replica_id=replica_id,
633
+ system=system,
634
+ text=text,
635
+ view=view,
636
+ until_completed=until_completed,
637
+ model_id=model_id,
638
+ model_operation=model_operation)
639
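Beyond the minimal docstring example, the parameters above cover the common tailing and filtering cases. A sketch, assuming the returned ServiceLog can be iterated line by line when view=False:

.. code-block:: python

    # Follow only error lines emitted by the 'run' function (placeholder name)
    service.log(follow=True, text='error', function_name='run')

    # Fetch a bounded window without printing and handle the lines yourself
    logs = service.log(start='2024-01-01T00:00:00Z', end='2024-01-02T00:00:00Z', view=False)
    for line in logs:
        print(line)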
+
640
+ def open_in_web(self):
641
+ """
642
+ Open the service in web platform
643
+
644
+ :return:
645
+ """
646
+ parsed_url = urlsplit(self.platform_url)
647
+ base_url = parsed_url.scheme + "://" + parsed_url.netloc
648
+ url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
649
+ self._client_api._open_in_web(url=url)
650
+
651
+ def checkout(self):
652
+ """
653
+ Checkout
654
+
655
+ :return:
656
+ """
657
+ return self.services.checkout(service=self)
658
+
659
+ def pause(self):
660
+ """
661
+ pause
662
+
663
+ :return:
664
+ """
665
+ return self.services.pause(service_id=self.id)
666
+
667
+ def resume(self):
668
+ """
669
+ resume
670
+
671
+ :return:
672
+ """
673
+ return self.services.resume(service_id=self.id)
674
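pause() stops the service from consuming new executions and resume() scales it back up; both delegate to the Services repository and return True on success. For example:

.. code-block:: python

    service.pause()    # stop consuming new executions
    # ... maintenance window ...
    service.resume()   # bring the service back up per its autoscaler settings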
+
675
+ def execute(
676
+ self,
677
+ execution_input=None,
678
+ function_name=None,
679
+ resource=None,
680
+ item_id=None,
681
+ dataset_id=None,
682
+ annotation_id=None,
683
+ project_id=None,
684
+ sync=False,
685
+ stream_logs=True,
686
+ return_output=True
687
+ ):
688
+ """
689
+ Execute a function on an existing service
690
+
691
+ :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
692
+ :param str function_name: function name to run
693
+ :param str resource: input type.
694
+ :param str item_id: optional - item id as input to function
695
+ :param str dataset_id: optional - dataset id as input to function
696
+ :param str annotation_id: optional - annotation id as input to function
697
+ :param str project_id: resource's project
698
+ :param bool sync: if true, wait for function to end
699
+ :param bool stream_logs: prints logs of the new execution. only works with sync=True
700
+ :param bool return_output: if True and sync is True - will return the output directly
701
+ :return: execution object
702
+ :rtype: dtlpy.entities.execution.Execution
703
+
704
+ **Example**:
705
+
706
+ .. code-block:: python
707
+
708
+ execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
709
+ """
710
+ execution = self.executions.create(sync=sync,
711
+ execution_input=execution_input,
712
+ function_name=function_name,
713
+ resource=resource,
714
+ item_id=item_id,
715
+ dataset_id=dataset_id,
716
+ annotation_id=annotation_id,
717
+ stream_logs=stream_logs,
718
+ project_id=project_id,
719
+ return_output=return_output)
720
+ return execution
721
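In addition to the id-based inputs shown in the docstring, execution_input accepts FunctionIO entities, and sync=True together with return_output=True returns the function's output directly. A sketch with placeholder function and input names:

.. code-block:: python

    output = service.execute(
        function_name='run',                                   # placeholder function name
        execution_input=[dl.FunctionIO(type=dl.PackageInputType.STRING,
                                       value='hello',
                                       name='text')],          # placeholder input name
        project_id=service.project_id,
        sync=True,
        return_output=True)
    print(output)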
+
722
+ def execute_batch(self,
723
+ filters,
724
+ function_name: str = None,
725
+ execution_inputs: list = None,
726
+ wait=True
727
+ ):
728
+ """
729
+ Execute a function on an existing service
730
+
731
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
732
+
733
+ :param filters: Filters entity for a filtering before execute
734
+ :param str function_name: function name to run
735
+ :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
736
+ :param bool wait: wait until the batch execution finishes
737
+ :return: execution object
738
+ :rtype: dtlpy.entities.execution.Execution
739
+
740
+ **Example**:
741
+
742
+ .. code-block:: python
743
+
744
+ command = service.execute_batch(
745
+ execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
746
+ filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]}),
747
+ function_name='run')
748
+ """
749
+ execution = self.executions.create_batch(service_id=self.id,
750
+ execution_inputs=execution_inputs,
751
+ filters=filters,
752
+ function_name=function_name,
753
+ wait=wait)
754
+ return execution
755
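The filters select which items the function runs on, while execution_inputs carry the extra, non-item inputs; wait=True blocks until the batch command finishes. A sketch with a placeholder dataset and directory:

.. code-block:: python

    filters = dl.Filters(field='dir',
                         values='/incoming',                     # placeholder directory
                         context={'datasets': [dataset.id]})     # placeholder dataset
    command = service.execute_batch(filters=filters, function_name='run', wait=True)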
+
756
+ def rerun_batch(self,
757
+ filters,
758
+ wait=True
759
+ ):
760
+ """
761
+ rerun executions on an existing service
762
+
763
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
764
+
765
+ :param filters: Filters entity for a filtering before rerun
766
+ :param bool wait: wait until the rerun command finishes
767
+ :return: rerun command
768
+ :rtype: dtlpy.entities.command.Command
769
+
770
+ **Example**:
771
+
772
+ .. code-block:: python
773
+
774
+ command = service.executions.rerun_batch(
775
+ filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
776
+ """
777
+ execution = self.executions.rerun_batch(service_id=self.id,
778
+ filters=filters,
779
+ wait=wait)
780
+ return execution
781
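The entity-level call above mirrors the repository example in the docstring; only a filter over executions is required. For instance, rerunning two specific executions of this service (placeholder ids):

.. code-block:: python

    filters = dl.Filters(field='id',
                         values=['first-execution-id', 'second-execution-id'],
                         operator=dl.FiltersOperations.IN,
                         resource=dl.FiltersResource.EXECUTION)
    command = service.rerun_batch(filters=filters, wait=True)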
+
782
+ def activate_slots(
783
+ self,
784
+ project_id: str = None,
785
+ task_id: str = None,
786
+ dataset_id: str = None,
787
+ org_id: str = None,
788
+ user_email: str = None,
789
+ slots=None,
790
+ role=None,
791
+ prevent_override: bool = True,
792
+ visible: bool = True,
793
+ icon: str = 'fas fa-magic',
794
+ **kwargs
795
+ ) -> object:
796
+ """
797
+ Activate service slots
798
+
799
+ :param str project_id: project id
800
+ :param str task_id: task id
801
+ :param str dataset_id: dataset id
802
+ :param str org_id: org id
803
+ :param str user_email: user email
804
+ :param list slots: list of entities.PackageSlot
805
+ :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.OWNER, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
806
+ :param bool prevent_override: True to prevent override
807
+ :param bool visible: visible
808
+ :param str icon: icon
809
+ :param kwargs: all additional arguments
810
+ :return: list of user setting for activated slots
811
+ :rtype: list
812
+
813
+ **Example**:
814
+
815
+ .. code-block:: python
816
+
817
+ setting = service.activate_slots(project_id='project_id',
818
+ slots=List[entities.PackageSlot],
819
+ icon='fas fa-magic')
820
+ """
821
+ return self.services.activate_slots(
822
+ service=self,
823
+ project_id=project_id,
824
+ task_id=task_id,
825
+ dataset_id=dataset_id,
826
+ org_id=org_id,
827
+ user_email=user_email,
828
+ slots=slots,
829
+ role=role,
830
+ prevent_override=prevent_override,
831
+ visible=visible,
832
+ icon=icon,
833
+ **kwargs
834
+ )
835
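Slots expose a service function as a UI action. The sketch below assumes dl.PackageSlot accepts module_name, function_name and display_name; consult the PackageSlot entity for the full set of display options:

.. code-block:: python

    slot = dl.PackageSlot(module_name='default_module',     # placeholder module name
                          function_name='run',              # placeholder function name
                          display_name='Run my function')
    settings = service.activate_slots(project_id=service.project_id,
                                      slots=[slot],
                                      role=dl.MemberOrgRole.ADMIN,
                                      icon='fas fa-magic')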
+
836
+ def restart(self, replica_name: str = None):
837
+ """
838
+ Restart service
839
+
840
+ :param str replica_name: replica name
841
+ :return: True
842
+ :rtype: bool
843
+ """
844
+ return self.services.restart(service=self, replica_name=replica_name)
845
+
846
+
847
+ class KubernetesAutoscalerType(str, Enum):
848
+ """ The Service Autoscaler Type (RABBITMQ, CPU).
849
+
850
+ .. list-table::
851
+ :widths: 15 150
852
+ :header-rows: 1
853
+
854
+ * - Type
855
+ - Description
856
+ * - RABBITMQ
857
+ - Service Autoscaler based on service queue length
858
+ * - CPU
859
+ - Service Autoscaler based on service CPU usage
860
+ * - RPS
861
+ - Service Autoscaler based on service RPS
862
+ """
863
+ RABBITMQ = 'rabbitmq'
864
+ CPU = 'cpu'
865
+ RPS = 'rps'
866
+
867
+
868
+ # added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
869
+ class KubernetesAutuscalerTypeMeta(type):
870
+ def __getattribute__(cls, item):
871
+ if hasattr(KubernetesAutoscalerType, item):
872
+ return getattr(KubernetesAutoscalerType, item)
873
+ else:
874
+ raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
875
+
876
+
877
+ class KubernetesAutoscaler(entities.BaseEntity):
878
+ MIN_REPLICA_DEFAULT = 0
879
+ MAX_REPLICA_DEFAULT = 1
880
+ AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
881
+
882
+ def __init__(self,
883
+ autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
884
+ min_replicas=MIN_REPLICA_DEFAULT,
885
+ max_replicas=MAX_REPLICA_DEFAULT,
886
+ cooldown_period=None,
887
+ polling_interval=None,
888
+ **kwargs):
889
+ self.autoscaler_type = kwargs.get('type', autoscaler_type)
890
+ self.min_replicas = kwargs.get('minReplicas', min_replicas)
891
+ self.max_replicas = kwargs.get('maxReplicas', max_replicas)
892
+ self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
893
+ self.polling_interval = kwargs.get('pollingInterval', polling_interval)
894
+
895
+ def to_json(self):
896
+ _json = {
897
+ 'type': self.autoscaler_type,
898
+ 'minReplicas': self.min_replicas,
899
+ 'maxReplicas': self.max_replicas
900
+ }
901
+
902
+ if self.cooldown_period is not None:
903
+ _json['cooldownPeriod'] = self.cooldown_period
904
+
905
+ if self.polling_interval is not None:
906
+ _json['pollingInterval'] = self.polling_interval
907
+
908
+ return _json
909
+
910
+
911
+ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
912
+ QUEUE_LENGTH_DEFAULT = 1000
913
+
914
+ def __init__(self,
915
+ min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
916
+ max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
917
+ queue_length=QUEUE_LENGTH_DEFAULT,
918
+ cooldown_period=None,
919
+ polling_interval=None,
920
+ **kwargs):
921
+ super().__init__(min_replicas=min_replicas,
922
+ max_replicas=max_replicas,
923
+ autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
924
+ cooldown_period=cooldown_period,
925
+ polling_interval=polling_interval, **kwargs)
926
+ self.queue_length = kwargs.get('queueLength', queue_length)
927
+
928
+ def to_json(self):
929
+ _json = super().to_json()
930
+ _json['queueLength'] = self.queue_length
931
+ return _json
932
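The RabbitMQ autoscaler scales on queue length. It is normally attached to a service through its runtime; the runtime.autoscaler assignment below is a sketch of that flow, assuming the service runtime is a KubernetesRuntime object:

.. code-block:: python

    autoscaler = dl.KubernetesRabbitmqAutoscaler(min_replicas=0,
                                                 max_replicas=3,
                                                 queue_length=50,     # target of ~50 queued executions per replica
                                                 cooldown_period=300,
                                                 polling_interval=10)
    service.runtime.autoscaler = autoscaler   # assumption: runtime is a KubernetesRuntime
    service.update(force=True)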
+
933
+
934
+ class KubernetesRPSAutoscaler(KubernetesAutoscaler):
935
+ THRESHOLD_DEFAULT = 10
936
+ RATE_SECONDS_DEFAULT = 30
937
+
938
+ def __init__(self,
939
+ min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
940
+ max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
941
+ threshold=THRESHOLD_DEFAULT,
942
+ rate_seconds=RATE_SECONDS_DEFAULT,
943
+ cooldown_period=None,
944
+ polling_interval=None,
945
+ **kwargs):
946
+ super().__init__(min_replicas=min_replicas,
947
+ max_replicas=max_replicas,
948
+ autoscaler_type=KubernetesAutoscalerType.RPS,
949
+ cooldown_period=cooldown_period,
950
+ polling_interval=polling_interval, **kwargs)
951
+ self.threshold = kwargs.get('threshold', threshold)
952
+ self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
953
+
954
+ def to_json(self):
955
+ _json = super().to_json()
956
+ _json['rateSeconds'] = self.rate_seconds
957
+ _json['threshold'] = self.threshold
958
+ return _json
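The RPS variant adds a request-rate target on top of the base fields, and its to_json() output is the payload the platform receives. Illustrative values below, assuming the class is exported on the top-level dl namespace like the other autoscalers:

.. code-block:: python

    autoscaler = dl.KubernetesRPSAutoscaler(min_replicas=1,
                                            max_replicas=5,
                                            threshold=20,      # target requests per window
                                            rate_seconds=60)   # window size in seconds
    payload = autoscaler.to_json()
    # camelCase keys sent to the platform: type, minReplicas, maxReplicas, threshold, rateSeconds
    print(payload)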