dtlpy 1.114.17__py3-none-any.whl → 1.116.6__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -311
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -296
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -442
  76. dtlpy/entities/dataset.py +1299 -1285
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -223
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -645
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -953
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -499
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -958
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1086
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -158
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -435
  166. dtlpy/repositories/datasets.py +1504 -1291
  167. dtlpy/repositories/downloader.py +976 -903
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -470
  170. dtlpy/repositories/executions.py +815 -817
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -238
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -909
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -988
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -651
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1782
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -183
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.114.17.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.114.17.dist-info/RECORD +0 -240
  237. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/service.py CHANGED
@@ -1,958 +1,963 @@
1
- import warnings
2
- from collections import namedtuple
3
- from enum import Enum
4
- import traceback
5
- import logging
6
- from typing import List
7
- from urllib.parse import urlsplit
8
- import attr
9
- from .. import repositories, entities
10
- from ..services.api_client import ApiClient
11
-
12
- logger = logging.getLogger(name='dtlpy')
13
-
14
-
15
- class ServiceType(str, Enum):
16
- """ The type of the service (SYSTEM).
17
-
18
- .. list-table::
19
- :widths: 15 150
20
- :header-rows: 1
21
-
22
- * - State
23
- - Description
24
- * - SYSTEM
25
- - Dataloop internal service
26
- """
27
- SYSTEM = 'system'
28
- REGULAR = 'regular'
29
-
30
-
31
- class ServiceModeType(str, Enum):
32
- """ The type of the service mode.
33
-
34
- .. list-table::
35
- :widths: 15 150
36
- :header-rows: 1
37
-
38
- * - State
39
- - Description
40
- * - REGULAR
41
- - Service regular mode type
42
- * - DEBUG
43
- - Service debug mode type
44
- """
45
- REGULAR = 'regular'
46
- DEBUG = 'debug'
47
-
48
-
49
- class OnResetAction(str, Enum):
50
- """ The Execution action when the service reset (RERUN, FAILED).
51
-
52
- .. list-table::
53
- :widths: 15 150
54
- :header-rows: 1
55
-
56
- * - State
57
- - Description
58
- * - RERUN
59
- - When the service resting rerun the execution
60
- * - FAILED
61
- - When the service resting fail the execution
62
- """
63
- RERUN = 'rerun'
64
- FAILED = 'failed'
65
-
66
-
67
- class InstanceCatalog(str, Enum):
68
- """ The Service Pode size.
69
-
70
- .. list-table::
71
- :widths: 15 150
72
- :header-rows: 1
73
-
74
- * - State
75
- - Description
76
- * - REGULAR_XS
77
- - regular pod with extra small size
78
- * - REGULAR_S
79
- - regular pod with small size
80
- * - REGULAR_M
81
- - regular pod with medium size
82
- * - REGULAR_L
83
- - regular pod with large size
84
- * - HIGHMEM_XS
85
- - highmem pod with extra small size
86
- * - HIGHMEM_S
87
- - highmem pod with small size
88
- * - HIGHMEM_M
89
- - highmem pod with medium size
90
- * - HIGHMEM_L
91
- - highmem pod with large size
92
- * - GPU_T4_S
93
- - GPU NVIDIA T4 pod with regular memory
94
- * - GPU_T4_M
95
- - GPU NVIDIA T4 pod with highmem
96
- """
97
- REGULAR_XS = "regular-xs"
98
- REGULAR_S = "regular-s"
99
- REGULAR_M = "regular-m"
100
- REGULAR_L = "regular-l"
101
- HIGHMEM_XS = "highmem-xs"
102
- HIGHMEM_S = "highmem-s"
103
- HIGHMEM_M = "highmem-m"
104
- HIGHMEM_L = "highmem-l"
105
- GPU_T4_S = "gpu-t4"
106
- GPU_T4_M = "gpu-t4-m"
107
-
108
-
109
- class RuntimeType(str, Enum):
110
- """ Service culture Runtime (KUBERNETES).
111
-
112
- .. list-table::
113
- :widths: 15 150
114
- :header-rows: 1
115
-
116
- * - State
117
- - Description
118
- * - KUBERNETES
119
- - Service run in kubernetes culture
120
- """
121
- KUBERNETES = 'kubernetes'
122
-
123
-
124
- class ServiceRuntime(entities.BaseEntity):
125
- def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
126
- self.service_type = service_type
127
-
128
-
129
- class KubernetesRuntime(ServiceRuntime):
130
- DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
131
- DEFAULT_NUM_REPLICAS = 1
132
- DEFAULT_CONCURRENCY = 10
133
-
134
- def __init__(self,
135
- pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
136
- num_replicas=DEFAULT_NUM_REPLICAS,
137
- concurrency=DEFAULT_CONCURRENCY,
138
- runner_image=None,
139
- autoscaler=None,
140
- **kwargs):
141
-
142
- super().__init__(service_type=RuntimeType.KUBERNETES)
143
- self.pod_type = kwargs.get('podType', pod_type)
144
- self.num_replicas = kwargs.get('numReplicas', num_replicas)
145
- self.concurrency = kwargs.get('concurrency', concurrency)
146
- self.runner_image = kwargs.get('runnerImage', runner_image)
147
- self._proxy_image = kwargs.get('proxyImage', None)
148
- self.single_agent = kwargs.get('singleAgent', None)
149
- self.preemptible = kwargs.get('preemptible', None)
150
-
151
- self.autoscaler = kwargs.get('autoscaler', autoscaler)
152
- if self.autoscaler is not None and isinstance(self.autoscaler, dict):
153
- if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
154
- self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
155
- elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
156
- self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
157
- else:
158
- raise NotImplementedError(
159
- 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
160
-
161
- def to_json(self):
162
- _json = {
163
- 'podType': self.pod_type,
164
- 'numReplicas': self.num_replicas,
165
- 'concurrency': self.concurrency,
166
- 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
167
- }
168
-
169
- if self.single_agent is not None:
170
- _json['singleAgent'] = self.single_agent
171
-
172
- if self.runner_image is not None:
173
- _json['runnerImage'] = self.runner_image
174
-
175
- if self._proxy_image is not None:
176
- _json['proxyImage'] = self._proxy_image
177
-
178
- if self.preemptible is not None:
179
- _json['preemptible'] = self.preemptible
180
-
181
- return _json
182
-
183
-
184
- @attr.s
185
- class Service(entities.BaseEntity):
186
- """
187
- Service object
188
- """
189
- # platform
190
- created_at = attr.ib()
191
- updated_at = attr.ib(repr=False)
192
- creator = attr.ib()
193
- version = attr.ib()
194
-
195
- package_id = attr.ib()
196
- package_revision = attr.ib()
197
-
198
- bot = attr.ib()
199
- use_user_jwt = attr.ib(repr=False)
200
- init_input = attr.ib()
201
- versions = attr.ib(repr=False)
202
- module_name = attr.ib()
203
- name = attr.ib()
204
- url = attr.ib()
205
- id = attr.ib()
206
- active = attr.ib()
207
- driver_id = attr.ib(repr=False)
208
- secrets = attr.ib(repr=False)
209
-
210
- # name change
211
- runtime = attr.ib(repr=False, type=KubernetesRuntime)
212
- queue_length_limit = attr.ib()
213
- run_execution_as_process = attr.ib(type=bool)
214
- execution_timeout = attr.ib()
215
- drain_time = attr.ib()
216
- on_reset = attr.ib(type=OnResetAction)
217
- _type = attr.ib(type=ServiceType)
218
- project_id = attr.ib()
219
- org_id = attr.ib()
220
- is_global = attr.ib()
221
- max_attempts = attr.ib()
222
- mode = attr.ib(repr=False)
223
- metadata = attr.ib()
224
- archive = attr.ib(repr=False)
225
- config = attr.ib(repr=False)
226
- settings = attr.ib(repr=False)
227
- panels = attr.ib(repr=False)
228
-
229
- # SDK
230
- _package = attr.ib(repr=False)
231
- _client_api = attr.ib(type=ApiClient, repr=False)
232
- _revisions = attr.ib(default=None, repr=False)
233
- # repositories
234
- _project = attr.ib(default=None, repr=False)
235
- _repositories = attr.ib(repr=False)
236
- updated_by = attr.ib(default=None)
237
- app = attr.ib(default=None)
238
- integrations = attr.ib(default=None)
239
-
240
- @property
241
- def createdAt(self):
242
- return self.created_at
243
-
244
- @property
245
- def updatedAt(self):
246
- return self.updated_at
247
-
248
- @staticmethod
249
- def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
250
- """
251
- Same as from_json but with try-except to catch if error
252
-
253
- :param _json: platform json
254
- :param client_api: ApiClient entity
255
- :param package:
256
- :param project: project entity
257
- :param is_fetched: is Entity fetched from Platform
258
- :return:
259
- """
260
- try:
261
- service = Service.from_json(_json=_json,
262
- client_api=client_api,
263
- package=package,
264
- project=project,
265
- is_fetched=is_fetched)
266
- status = True
267
- except Exception:
268
- service = traceback.format_exc()
269
- status = False
270
- return status, service
271
-
272
- @classmethod
273
- def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
274
- """
275
- Build a service entity object from a json
276
-
277
- :param dict _json: platform json
278
- :param dl.ApiClient client_api: ApiClient entity
279
- :param dtlpy.entities.package.Package package: package entity
280
- :param dtlpy.entities.project.Project project: project entity
281
- :param bool is_fetched: is Entity fetched from Platform
282
- :return: service object
283
- :rtype: dtlpy.entities.service.Service
284
- """
285
- if project is not None:
286
- if project.id != _json.get('projectId', None):
287
- logger.warning('Service has been fetched from a project that is not belong to it')
288
- project = None
289
-
290
- if package is not None:
291
- if package.id != _json.get('packageId', None):
292
- logger.warning('Service has been fetched from a package that is not belong to it')
293
- package = None
294
-
295
- versions = _json.get('versions', dict())
296
- runtime = _json.get("runtime", None)
297
- if runtime:
298
- runtime = KubernetesRuntime(**runtime)
299
-
300
- inst = cls(
301
- package_revision=_json.get("packageRevision", None),
302
- bot=_json.get("botUserName", None),
303
- use_user_jwt=_json.get("useUserJwt", False),
304
- created_at=_json.get("createdAt", None),
305
- updated_at=_json.get("updatedAt", None),
306
- project_id=_json.get('projectId', None),
307
- package_id=_json.get('packageId', None),
308
- driver_id=_json.get('driverId', None),
309
- max_attempts=_json.get('maxAttempts', None),
310
- version=_json.get('version', None),
311
- creator=_json.get('creator', None),
312
- revisions=_json.get('revisions', None),
313
- queue_length_limit=_json.get('queueLengthLimit', None),
314
- active=_json.get('active', None),
315
- runtime=runtime,
316
- is_global=_json.get("global", False),
317
- init_input=_json.get("initParams", dict()),
318
- module_name=_json.get("moduleName", None),
319
- run_execution_as_process=_json.get('runExecutionAsProcess', False),
320
- execution_timeout=_json.get('executionTimeout', 60 * 60),
321
- drain_time=_json.get('drainTime', 60 * 10),
322
- on_reset=_json.get('onReset', OnResetAction.FAILED),
323
- name=_json.get("name", None),
324
- url=_json.get("url", None),
325
- id=_json.get("id", None),
326
- versions=versions,
327
- client_api=client_api,
328
- package=package,
329
- project=project,
330
- secrets=_json.get("secrets", None),
331
- type=_json.get("type", None),
332
- mode=_json.get('mode', dict()),
333
- metadata=_json.get('metadata', None),
334
- archive=_json.get('archive', None),
335
- updated_by=_json.get('updatedBy', None),
336
- config=_json.get('config', None),
337
- settings=_json.get('settings', None),
338
- app=_json.get('app', None),
339
- integrations=_json.get('integrations', None),
340
- org_id=_json.get('orgId', None),
341
- panels=_json.get('panels', None)
342
- )
343
- inst.is_fetched = is_fetched
344
- return inst
345
-
346
- ############
347
- # Entities #
348
- ############
349
- @property
350
- def revisions(self):
351
- if self._revisions is None:
352
- self._revisions = self.services.revisions(service=self)
353
- return self._revisions
354
-
355
- @property
356
- def platform_url(self):
357
- return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
358
-
359
- @property
360
- def project(self):
361
- if self._project is None:
362
- self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
363
- fetch=None)
364
- assert isinstance(self._project, entities.Project)
365
- return self._project
366
-
367
- @property
368
- def package(self):
369
- if self._package is None:
370
- try:
371
- dpk_id = None
372
- dpk_version = None
373
- if self.app and isinstance(self.app, dict):
374
- dpk_id = self.app.get('dpkId', None)
375
- dpk_version = self.app.get('dpkVersion', None)
376
- if dpk_id is None:
377
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
378
- dpk_id=self.package_id)
379
- else:
380
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
381
- dpk_id=dpk_id,
382
- version=dpk_version)
383
-
384
- assert isinstance(self._package, entities.Dpk)
385
- except:
386
- self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
387
- fetch=None,
388
- log_error=False)
389
- assert isinstance(self._package, entities.Package)
390
- return self._package
391
-
392
- @property
393
- def execution_url(self):
394
- return 'CURL -X POST' \
395
- '\nauthorization: Bearer <token>' \
396
- '\nContent-Type: application/json" -d {' \
397
- '\n"input": {<input json>}, ' \
398
- '"projectId": "{<project_id>}", ' \
399
- '"functionName": "<function_name>"}'
400
-
401
- ################
402
- # repositories #
403
- ################
404
- @_repositories.default
405
- def set_repositories(self):
406
- reps = namedtuple('repositories',
407
- field_names=['executions', 'services', 'triggers'])
408
-
409
- if self._package is None:
410
- services_repo = repositories.Services(client_api=self._client_api,
411
- package=self._package,
412
- project=self._project)
413
- else:
414
- services_repo = self._package.services
415
-
416
- triggers = repositories.Triggers(client_api=self._client_api,
417
- project=self._project,
418
- service=self)
419
-
420
- r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
421
- services=services_repo, triggers=triggers)
422
- return r
423
-
424
- @property
425
- def executions(self):
426
- assert isinstance(self._repositories.executions, repositories.Executions)
427
- return self._repositories.executions
428
-
429
- @property
430
- def triggers(self):
431
- assert isinstance(self._repositories.triggers, repositories.Triggers)
432
- return self._repositories.triggers
433
-
434
- @property
435
- def services(self):
436
- assert isinstance(self._repositories.services, repositories.Services)
437
- return self._repositories.services
438
-
439
- ###########
440
- # methods #
441
- ###########
442
- def to_json(self):
443
- """
444
- Returns platform _json format of object
445
-
446
- :return: platform json format of object
447
- :rtype: dict
448
- """
449
- _json = attr.asdict(
450
- self,
451
- filter=attr.filters.exclude(
452
- attr.fields(Service)._project,
453
- attr.fields(Service)._package,
454
- attr.fields(Service)._revisions,
455
- attr.fields(Service)._client_api,
456
- attr.fields(Service)._repositories,
457
- attr.fields(Service).project_id,
458
- attr.fields(Service).init_input,
459
- attr.fields(Service).module_name,
460
- attr.fields(Service).bot,
461
- attr.fields(Service).package_id,
462
- attr.fields(Service).is_global,
463
- attr.fields(Service).use_user_jwt,
464
- attr.fields(Service).package_revision,
465
- attr.fields(Service).driver_id,
466
- attr.fields(Service).run_execution_as_process,
467
- attr.fields(Service).execution_timeout,
468
- attr.fields(Service).drain_time,
469
- attr.fields(Service).runtime,
470
- attr.fields(Service).queue_length_limit,
471
- attr.fields(Service).max_attempts,
472
- attr.fields(Service).on_reset,
473
- attr.fields(Service).created_at,
474
- attr.fields(Service).updated_at,
475
- attr.fields(Service).secrets,
476
- attr.fields(Service)._type,
477
- attr.fields(Service).mode,
478
- attr.fields(Service).metadata,
479
- attr.fields(Service).archive,
480
- attr.fields(Service).updated_by,
481
- attr.fields(Service).config,
482
- attr.fields(Service).settings,
483
- attr.fields(Service).app,
484
- attr.fields(Service).integrations,
485
- attr.fields(Service).org_id,
486
- attr.fields(Service).panels
487
- )
488
- )
489
-
490
- _json['projectId'] = self.project_id
491
- _json['orgId'] = self.org_id
492
- _json['packageId'] = self.package_id
493
- _json['initParams'] = self.init_input
494
- _json['moduleName'] = self.module_name
495
- _json['botUserName'] = self.bot
496
- _json['useUserJwt'] = self.use_user_jwt
497
- _json['global'] = self.is_global
498
- _json['driverId'] = self.driver_id
499
- _json['packageRevision'] = self.package_revision
500
- _json['runExecutionAsProcess'] = self.run_execution_as_process
501
- _json['executionTimeout'] = self.execution_timeout
502
- _json['drainTime'] = self.drain_time
503
- _json['onReset'] = self.on_reset
504
- _json['createdAt'] = self.created_at
505
- _json['updatedAt'] = self.updated_at
506
-
507
- if self.updated_by is not None:
508
- _json['updatedBy'] = self.updated_by
509
-
510
- if self.panels is not None:
511
- _json['panels'] = self.panels
512
-
513
- if self.max_attempts is not None:
514
- _json['maxAttempts'] = self.max_attempts
515
-
516
- if self.is_global is not None:
517
- _json['global'] = self.is_global
518
-
519
- if self.runtime:
520
- _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
521
-
522
- if self.queue_length_limit is not None:
523
- _json['queueLengthLimit'] = self.queue_length_limit
524
-
525
- if self.secrets is not None:
526
- _json['secrets'] = self.secrets
527
-
528
- if self._type is not None:
529
- _json['type'] = self._type
530
-
531
- if self.mode:
532
- _json['mode'] = self.mode
533
-
534
- if self.metadata:
535
- _json['metadata'] = self.metadata
536
-
537
- if self.archive is not None:
538
- _json['archive'] = self.archive
539
-
540
- if self.config is not None:
541
- _json['config'] = self.config
542
-
543
- if self.settings is not None:
544
- _json['settings'] = self.settings
545
-
546
- if self.app is not None:
547
- _json['app'] = self.app
548
-
549
- if self.integrations is not None:
550
- _json['integrations'] = self.integrations
551
-
552
- return _json
553
-
554
- def update(self, force=False):
555
- """
556
- Update Service changes to platform
557
-
558
- :param bool force: force update
559
- :return: Service entity
560
- :rtype: dtlpy.entities.service.Service
561
- """
562
- return self.services.update(service=self, force=force)
563
-
564
- def delete(self, force: bool = False):
565
- """
566
- Delete Service object
567
-
568
- :return: True
569
- :rtype: bool
570
- """
571
- return self.services.delete(service_id=self.id, force=force)
572
-
573
- def status(self):
574
- """
575
- Get Service status
576
-
577
- :return: status json
578
- :rtype: dict
579
- """
580
- return self.services.status(service_id=self.id)
581
-
582
- def log(self,
583
- size=None,
584
- checkpoint=None,
585
- start=None,
586
- end=None,
587
- follow=False,
588
- text=None,
589
- execution_id=None,
590
- function_name=None,
591
- replica_id=None,
592
- system=False,
593
- view=True,
594
- until_completed=True,
595
- model_id: str = None,
596
- model_operation: str = None,
597
- ):
598
- """
599
- Get service logs
600
-
601
- :param int size: size
602
- :param dict checkpoint: the information from the lst point checked in the service
603
- :param str start: iso format time
604
- :param str end: iso format time
605
- :param bool follow: if true, keep stream future logs
606
- :param str text: text
607
- :param str execution_id: execution id
608
- :param str function_name: function name
609
- :param str replica_id: replica id
610
- :param bool system: system
611
- :param bool view: if true, print out all the logs
612
- :param bool until_completed: wait until completed
613
- :param str model_id: model id
614
- :param str model_operation: model operation action
615
- :return: ServiceLog entity
616
- :rtype: ServiceLog
617
-
618
- **Example**:
619
-
620
- .. code-block:: python
621
-
622
- service_log = service.log()
623
- """
624
- return self.services.log(service=self,
625
- size=size,
626
- checkpoint=checkpoint,
627
- start=start,
628
- end=end,
629
- follow=follow,
630
- execution_id=execution_id,
631
- function_name=function_name,
632
- replica_id=replica_id,
633
- system=system,
634
- text=text,
635
- view=view,
636
- until_completed=until_completed,
637
- model_id=model_id,
638
- model_operation=model_operation)
639
-
640
- def open_in_web(self):
641
- """
642
- Open the service in web platform
643
-
644
- :return:
645
- """
646
- parsed_url = urlsplit(self.platform_url)
647
- base_url = parsed_url.scheme + "://" + parsed_url.netloc
648
- url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
649
- self._client_api._open_in_web(url=url)
650
-
651
- def checkout(self):
652
- """
653
- Checkout
654
-
655
- :return:
656
- """
657
- return self.services.checkout(service=self)
658
-
659
- def pause(self):
660
- """
661
- pause
662
-
663
- :return:
664
- """
665
- return self.services.pause(service_id=self.id)
666
-
667
- def resume(self):
668
- """
669
- resume
670
-
671
- :return:
672
- """
673
- return self.services.resume(service_id=self.id)
674
-
675
- def execute(
676
- self,
677
- execution_input=None,
678
- function_name=None,
679
- resource=None,
680
- item_id=None,
681
- dataset_id=None,
682
- annotation_id=None,
683
- project_id=None,
684
- sync=False,
685
- stream_logs=True,
686
- return_output=True
687
- ):
688
- """
689
- Execute a function on an existing service
690
-
691
- :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
692
- :param str function_name: function name to run
693
- :param str resource: input type.
694
- :param str item_id: optional - item id as input to function
695
- :param str dataset_id: optional - dataset id as input to function
696
- :param str annotation_id: optional - annotation id as input to function
697
- :param str project_id: resource's project
698
- :param bool sync: if true, wait for function to end
699
- :param bool stream_logs: prints logs of the new execution. only works with sync=True
700
- :param bool return_output: if True and sync is True - will return the output directly
701
- :return: execution object
702
- :rtype: dtlpy.entities.execution.Execution
703
-
704
- **Example**:
705
-
706
- .. code-block:: python
707
-
708
- execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
709
- """
710
- execution = self.executions.create(sync=sync,
711
- execution_input=execution_input,
712
- function_name=function_name,
713
- resource=resource,
714
- item_id=item_id,
715
- dataset_id=dataset_id,
716
- annotation_id=annotation_id,
717
- stream_logs=stream_logs,
718
- project_id=project_id,
719
- return_output=return_output)
720
- return execution
721
-
722
- def execute_batch(self,
723
- filters,
724
- function_name: str = None,
725
- execution_inputs: list = None,
726
- wait=True
727
- ):
728
- """
729
- Execute a function on an existing service
730
-
731
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
732
-
733
- :param filters: Filters entity for a filtering before execute
734
- :param str function_name: function name to run
735
- :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
736
- :param bool wait: wait until create task finish
737
- :return: execution object
738
- :rtype: dtlpy.entities.execution.Execution
739
-
740
- **Example**:
741
-
742
- .. code-block:: python
743
-
744
- command = service.execute_batch(
745
- execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
746
- filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]),
747
- function_name='run')
748
- """
749
- execution = self.executions.create_batch(service_id=self.id,
750
- execution_inputs=execution_inputs,
751
- filters=filters,
752
- function_name=function_name,
753
- wait=wait)
754
- return execution
755
-
756
- def rerun_batch(self,
757
- filters,
758
- wait=True
759
- ):
760
- """
761
- rerun a executions on an existing service
762
-
763
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
764
-
765
- :param filters: Filters entity for a filtering before rerun
766
- :param bool wait: wait until create task finish
767
- :return: rerun command
768
- :rtype: dtlpy.entities.command.Command
769
-
770
- **Example**:
771
-
772
- .. code-block:: python
773
-
774
- command = service.executions.rerun_batch(
775
- filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
776
- """
777
- execution = self.executions.rerun_batch(service_id=self.id,
778
- filters=filters,
779
- wait=wait)
780
- return execution
781
-
782
- def activate_slots(
783
- self,
784
- project_id: str = None,
785
- task_id: str = None,
786
- dataset_id: str = None,
787
- org_id: str = None,
788
- user_email: str = None,
789
- slots=None,
790
- role=None,
791
- prevent_override: bool = True,
792
- visible: bool = True,
793
- icon: str = 'fas fa-magic',
794
- **kwargs
795
- ) -> object:
796
- """
797
- Activate service slots
798
-
799
- :param str project_id: project id
800
- :param str task_id: task id
801
- :param str dataset_id: dataset id
802
- :param str org_id: org id
803
- :param str user_email: user email
804
- :param list slots: list of entities.PackageSlot
805
- :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.owner, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
806
- :param bool prevent_override: True to prevent override
807
- :param bool visible: visible
808
- :param str icon: icon
809
- :param kwargs: all additional arguments
810
- :return: list of user setting for activated slots
811
- :rtype: list
812
-
813
- **Example**:
814
-
815
- .. code-block:: python
816
-
817
- setting = service.activate_slots(project_id='project_id',
818
- slots=List[entities.PackageSlot],
819
- icon='fas fa-magic')
820
- """
821
- return self.services.activate_slots(
822
- service=self,
823
- project_id=project_id,
824
- task_id=task_id,
825
- dataset_id=dataset_id,
826
- org_id=org_id,
827
- user_email=user_email,
828
- slots=slots,
829
- role=role,
830
- prevent_override=prevent_override,
831
- visible=visible,
832
- icon=icon,
833
- **kwargs
834
- )
835
-
836
- def restart(self, replica_name: str = None):
837
- """
838
- Restart service
839
-
840
- :param str replica_name: replica name
841
- :return: True
842
- :rtype: bool
843
- """
844
- return self.services.restart(service=self, replica_name=replica_name)
845
-
846
-
847
- class KubernetesAutoscalerType(str, Enum):
848
- """ The Service Autoscaler Type (RABBITMQ, CPU).
849
-
850
- .. list-table::
851
- :widths: 15 150
852
- :header-rows: 1
853
-
854
- * - State
855
- - Description
856
- * - RABBITMQ
857
- - Service Autoscaler based on service queue length
858
- * - CPU
859
- - Service Autoscaler based on service CPU usage
860
- * - RPS
861
- - Service Autoscaler based on service RPS
862
- """
863
- RABBITMQ = 'rabbitmq'
864
- CPU = 'cpu'
865
- RPS = 'rps'
866
-
867
-
868
- # added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
869
- class KubernetesAutuscalerTypeMeta(type):
870
- def __getattribute__(cls, item):
871
- if hasattr(KubernetesAutoscalerType, item):
872
- return getattr(KubernetesAutoscalerType, item)
873
- else:
874
- raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
875
-
876
-
877
- class KubernetesAutoscaler(entities.BaseEntity):
878
- MIN_REPLICA_DEFAULT = 0
879
- MAX_REPLICA_DEFAULT = 1
880
- AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
881
-
882
- def __init__(self,
883
- autoscaler_type: KubernetesAutoscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
884
- min_replicas=MIN_REPLICA_DEFAULT,
885
- max_replicas=MAX_REPLICA_DEFAULT,
886
- cooldown_period=None,
887
- polling_interval=None,
888
- **kwargs):
889
- self.autoscaler_type = kwargs.get('type', autoscaler_type)
890
- self.min_replicas = kwargs.get('minReplicas', min_replicas)
891
- self.max_replicas = kwargs.get('maxReplicas', max_replicas)
892
- self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
893
- self.polling_interval = kwargs.get('pollingInterval', polling_interval)
894
-
895
- def to_json(self):
896
- _json = {
897
- 'type': self.autoscaler_type,
898
- 'minReplicas': self.min_replicas,
899
- 'maxReplicas': self.max_replicas
900
- }
901
-
902
- if self.cooldown_period is not None:
903
- _json['cooldownPeriod'] = self.cooldown_period
904
-
905
- if self.polling_interval is not None:
906
- _json['pollingInterval'] = self.polling_interval
907
-
908
- return _json
909
-
910
-
911
- class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
912
- QUEUE_LENGTH_DEFAULT = 1000
913
-
914
- def __init__(self,
915
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
916
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
917
- queue_length=QUEUE_LENGTH_DEFAULT,
918
- cooldown_period=None,
919
- polling_interval=None,
920
- **kwargs):
921
- super().__init__(min_replicas=min_replicas,
922
- max_replicas=max_replicas,
923
- autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
924
- cooldown_period=cooldown_period,
925
- polling_interval=polling_interval, **kwargs)
926
- self.queue_length = kwargs.get('queueLength', queue_length)
927
-
928
- def to_json(self):
929
- _json = super().to_json()
930
- _json['queueLength'] = self.queue_length
931
- return _json
932
-
933
-
934
- class KubernetesRPSAutoscaler(KubernetesAutoscaler):
935
- THRESHOLD_DEFAULT = 10
936
- RATE_SECONDS_DEFAULT = 30
937
-
938
- def __init__(self,
939
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
940
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
941
- threshold=THRESHOLD_DEFAULT,
942
- rate_seconds=RATE_SECONDS_DEFAULT,
943
- cooldown_period=None,
944
- polling_interval=None,
945
- **kwargs):
946
- super().__init__(min_replicas=min_replicas,
947
- max_replicas=max_replicas,
948
- autoscaler_type=KubernetesAutoscalerType.RPS,
949
- cooldown_period=cooldown_period,
950
- polling_interval=polling_interval, **kwargs)
951
- self.threshold = kwargs.get('threshold', threshold)
952
- self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
953
-
954
- def to_json(self):
955
- _json = super().to_json()
956
- _json['rateSeconds'] = self.rate_seconds
957
- _json['threshold'] = self.threshold
958
- return _json
1
+ import warnings
2
+ from collections import namedtuple
3
+ from enum import Enum
4
+ import traceback
5
+ import logging
6
+ from typing import List
7
+ from urllib.parse import urlsplit
8
+ import attr
9
+ from .. import repositories, entities
10
+ from ..services.api_client import ApiClient
11
+
12
+ logger = logging.getLogger(name='dtlpy')
13
+
14
+
15
+ class ServiceType(str, Enum):
16
+ """ The type of the service (SYSTEM).
17
+
18
+ .. list-table::
19
+ :widths: 15 150
20
+ :header-rows: 1
21
+
22
+ * - State
23
+ - Description
24
+ * - SYSTEM
25
+ - Dataloop internal service
26
+ """
27
+ SYSTEM = 'system'
28
+ REGULAR = 'regular'
29
+
30
+
31
+ class ServiceModeType(str, Enum):
32
+ """ The type of the service mode.
33
+
34
+ .. list-table::
35
+ :widths: 15 150
36
+ :header-rows: 1
37
+
38
+ * - State
39
+ - Description
40
+ * - REGULAR
41
+ - Service regular mode type
42
+ * - DEBUG
43
+ - Service debug mode type
44
+ """
45
+ REGULAR = 'regular'
46
+ DEBUG = 'debug'
47
+
48
+
49
+ class OnResetAction(str, Enum):
50
+ """ The Execution action when the service reset (RERUN, FAILED).
51
+
52
+ .. list-table::
53
+ :widths: 15 150
54
+ :header-rows: 1
55
+
56
+ * - State
57
+ - Description
58
+ * - RERUN
59
+ - When the service resting rerun the execution
60
+ * - FAILED
61
+ - When the service resting fail the execution
62
+ """
63
+ RERUN = 'rerun'
64
+ FAILED = 'failed'
65
+
66
+
67
+ class InstanceCatalog(str, Enum):
68
+ """ The Service Pode size.
69
+
70
+ .. list-table::
71
+ :widths: 15 150
72
+ :header-rows: 1
73
+
74
+ * - State
75
+ - Description
76
+ * - REGULAR_XS
77
+ - regular pod with extra small size
78
+ * - REGULAR_S
79
+ - regular pod with small size
80
+ * - REGULAR_M
81
+ - regular pod with medium size
82
+ * - REGULAR_L
83
+ - regular pod with large size
84
+ * - HIGHMEM_XS
85
+ - highmem pod with extra small size
86
+ * - HIGHMEM_S
87
+ - highmem pod with small size
88
+ * - HIGHMEM_M
89
+ - highmem pod with medium size
90
+ * - HIGHMEM_L
91
+ - highmem pod with large size
92
+ * - GPU_T4_S
93
+ - GPU NVIDIA T4 pod with regular memory
94
+ * - GPU_T4_M
95
+ - GPU NVIDIA T4 pod with highmem
96
+ """
97
+ REGULAR_XS = "regular-xs"
98
+ REGULAR_S = "regular-s"
99
+ REGULAR_M = "regular-m"
100
+ REGULAR_L = "regular-l"
101
+ HIGHMEM_XS = "highmem-xs"
102
+ HIGHMEM_S = "highmem-s"
103
+ HIGHMEM_M = "highmem-m"
104
+ HIGHMEM_L = "highmem-l"
105
+ GPU_T4_S = "gpu-t4"
106
+ GPU_T4_M = "gpu-t4-m"
107
+
108
+
109
+ class RuntimeType(str, Enum):
110
+ """ Service culture Runtime (KUBERNETES).
111
+
112
+ .. list-table::
113
+ :widths: 15 150
114
+ :header-rows: 1
115
+
116
+ * - State
117
+ - Description
118
+ * - KUBERNETES
119
+ - Service run in kubernetes culture
120
+ """
121
+ KUBERNETES = 'kubernetes'
122
+
123
+
124
+ class ServiceRuntime(entities.BaseEntity):
125
+ def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
126
+ self.service_type = service_type
127
+
128
+
129
+ class KubernetesRuntime(ServiceRuntime):
130
+ DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
131
+ DEFAULT_NUM_REPLICAS = 1
132
+ DEFAULT_CONCURRENCY = 10
133
+
134
+ def __init__(self,
135
+ pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
136
+ num_replicas=DEFAULT_NUM_REPLICAS,
137
+ concurrency=DEFAULT_CONCURRENCY,
138
+ dynamic_concurrency=None,
139
+ runner_image=None,
140
+ autoscaler=None,
141
+ **kwargs):
142
+
143
+ super().__init__(service_type=RuntimeType.KUBERNETES)
144
+ self.pod_type = kwargs.get('podType', pod_type)
145
+ self.num_replicas = kwargs.get('numReplicas', num_replicas)
146
+ self.concurrency = kwargs.get('concurrency', concurrency)
147
+ self.runner_image = kwargs.get('runnerImage', runner_image)
148
+ self._proxy_image = kwargs.get('proxyImage', None)
149
+ self.single_agent = kwargs.get('singleAgent', None)
150
+ self.preemptible = kwargs.get('preemptible', None)
151
+ self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
152
+
153
+ self.autoscaler = kwargs.get('autoscaler', autoscaler)
154
+ if self.autoscaler is not None and isinstance(self.autoscaler, dict):
155
+ if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
156
+ self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
157
+ elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
158
+ self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
159
+ else:
160
+ raise NotImplementedError(
161
+ 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
162
+
163
+ def to_json(self):
164
+ _json = {
165
+ 'podType': self.pod_type,
166
+ 'numReplicas': self.num_replicas,
167
+ 'concurrency': self.concurrency,
168
+ 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
169
+ }
170
+
171
+ if self.single_agent is not None:
172
+ _json['singleAgent'] = self.single_agent
173
+
174
+ if self.runner_image is not None:
175
+ _json['runnerImage'] = self.runner_image
176
+
177
+ if self._proxy_image is not None:
178
+ _json['proxyImage'] = self._proxy_image
179
+
180
+ if self.preemptible is not None:
181
+ _json['preemptible'] = self.preemptible
182
+
183
+ if self.dynamic_concurrency is not None:
184
+ _json['dynamicConcurrency'] = self.dynamic_concurrency
185
+
186
+ return _json
187
+
188
+
189
+ @attr.s
190
+ class Service(entities.BaseEntity):
191
+ """
192
+ Service object
193
+ """
194
+ # platform
195
+ created_at = attr.ib()
196
+ updated_at = attr.ib(repr=False)
197
+ creator = attr.ib()
198
+ version = attr.ib()
199
+
200
+ package_id = attr.ib()
201
+ package_revision = attr.ib()
202
+
203
+ bot = attr.ib()
204
+ use_user_jwt = attr.ib(repr=False)
205
+ init_input = attr.ib()
206
+ versions = attr.ib(repr=False)
207
+ module_name = attr.ib()
208
+ name = attr.ib()
209
+ url = attr.ib()
210
+ id = attr.ib()
211
+ active = attr.ib()
212
+ driver_id = attr.ib(repr=False)
213
+ secrets = attr.ib(repr=False)
214
+
215
+ # name change
216
+ runtime = attr.ib(repr=False, type=KubernetesRuntime)
217
+ queue_length_limit = attr.ib()
218
+ run_execution_as_process = attr.ib(type=bool)
219
+ execution_timeout = attr.ib()
220
+ drain_time = attr.ib()
221
+ on_reset = attr.ib(type=OnResetAction)
222
+ _type = attr.ib(type=ServiceType)
223
+ project_id = attr.ib()
224
+ org_id = attr.ib()
225
+ is_global = attr.ib()
226
+ max_attempts = attr.ib()
227
+ mode = attr.ib(repr=False)
228
+ metadata = attr.ib()
229
+ archive = attr.ib(repr=False)
230
+ config = attr.ib(repr=False)
231
+ settings = attr.ib(repr=False)
232
+ panels = attr.ib(repr=False)
233
+
234
+ # SDK
235
+ _package = attr.ib(repr=False)
236
+ _client_api = attr.ib(type=ApiClient, repr=False)
237
+ _revisions = attr.ib(default=None, repr=False)
238
+ # repositories
239
+ _project = attr.ib(default=None, repr=False)
240
+ _repositories = attr.ib(repr=False)
241
+ updated_by = attr.ib(default=None)
242
+ app = attr.ib(default=None)
243
+ integrations = attr.ib(default=None)
244
+
245
+ @property
246
+ def createdAt(self):
247
+ return self.created_at
248
+
249
+ @property
250
+ def updatedAt(self):
251
+ return self.updated_at
252
+
253
+ @staticmethod
254
+ def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
255
+ """
256
+ Same as from_json but with try-except to catch if error
257
+
258
+ :param _json: platform json
259
+ :param client_api: ApiClient entity
260
+ :param package:
261
+ :param project: project entity
262
+ :param is_fetched: is Entity fetched from Platform
263
+ :return:
264
+ """
265
+ try:
266
+ service = Service.from_json(_json=_json,
267
+ client_api=client_api,
268
+ package=package,
269
+ project=project,
270
+ is_fetched=is_fetched)
271
+ status = True
272
+ except Exception:
273
+ service = traceback.format_exc()
274
+ status = False
275
+ return status, service
276
+
277
+ @classmethod
278
+ def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
279
+ """
280
+ Build a service entity object from a json
281
+
282
+ :param dict _json: platform json
283
+ :param dl.ApiClient client_api: ApiClient entity
284
+ :param dtlpy.entities.package.Package package: package entity
285
+ :param dtlpy.entities.project.Project project: project entity
286
+ :param bool is_fetched: is Entity fetched from Platform
287
+ :return: service object
288
+ :rtype: dtlpy.entities.service.Service
289
+ """
290
+ if project is not None:
291
+ if project.id != _json.get('projectId', None):
292
+ logger.warning('Service has been fetched from a project that is not belong to it')
293
+ project = None
294
+
295
+ if package is not None:
296
+ if package.id != _json.get('packageId', None):
297
+ logger.warning('Service has been fetched from a package that is not belong to it')
298
+ package = None
299
+
300
+ versions = _json.get('versions', dict())
301
+ runtime = _json.get("runtime", None)
302
+ if runtime:
303
+ runtime = KubernetesRuntime(**runtime)
304
+
305
+ inst = cls(
306
+ package_revision=_json.get("packageRevision", None),
307
+ bot=_json.get("botUserName", None),
308
+ use_user_jwt=_json.get("useUserJwt", False),
309
+ created_at=_json.get("createdAt", None),
310
+ updated_at=_json.get("updatedAt", None),
311
+ project_id=_json.get('projectId', None),
312
+ package_id=_json.get('packageId', None),
313
+ driver_id=_json.get('driverId', None),
314
+ max_attempts=_json.get('maxAttempts', None),
315
+ version=_json.get('version', None),
316
+ creator=_json.get('creator', None),
317
+ revisions=_json.get('revisions', None),
318
+ queue_length_limit=_json.get('queueLengthLimit', None),
319
+ active=_json.get('active', None),
320
+ runtime=runtime,
321
+ is_global=_json.get("global", False),
322
+ init_input=_json.get("initParams", dict()),
323
+ module_name=_json.get("moduleName", None),
324
+ run_execution_as_process=_json.get('runExecutionAsProcess', False),
325
+ execution_timeout=_json.get('executionTimeout', 60 * 60),
326
+ drain_time=_json.get('drainTime', 60 * 10),
327
+ on_reset=_json.get('onReset', OnResetAction.FAILED),
328
+ name=_json.get("name", None),
329
+ url=_json.get("url", None),
330
+ id=_json.get("id", None),
331
+ versions=versions,
332
+ client_api=client_api,
333
+ package=package,
334
+ project=project,
335
+ secrets=_json.get("secrets", None),
336
+ type=_json.get("type", None),
337
+ mode=_json.get('mode', dict()),
338
+ metadata=_json.get('metadata', None),
339
+ archive=_json.get('archive', None),
340
+ updated_by=_json.get('updatedBy', None),
341
+ config=_json.get('config', None),
342
+ settings=_json.get('settings', None),
343
+ app=_json.get('app', None),
344
+ integrations=_json.get('integrations', None),
345
+ org_id=_json.get('orgId', None),
346
+ panels=_json.get('panels', None)
347
+ )
348
+ inst.is_fetched = is_fetched
349
+ return inst
350
+
351
+ ############
352
+ # Entities #
353
+ ############
354
+ @property
355
+ def revisions(self):
356
+ if self._revisions is None:
357
+ self._revisions = self.services.revisions(service=self)
358
+ return self._revisions
359
+
360
+ @property
361
+ def platform_url(self):
362
+ return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
363
+
364
+ @property
365
+ def project(self):
366
+ if self._project is None:
367
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
368
+ fetch=None)
369
+ assert isinstance(self._project, entities.Project)
370
+ return self._project
371
+
372
+ @property
373
+ def package(self):
374
+ if self._package is None:
375
+ try:
376
+ dpk_id = None
377
+ dpk_version = None
378
+ if self.app and isinstance(self.app, dict):
379
+ dpk_id = self.app.get('dpkId', None)
380
+ dpk_version = self.app.get('dpkVersion', None)
381
+ if dpk_id is None:
382
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
383
+ dpk_id=self.package_id)
384
+ else:
385
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
386
+ dpk_id=dpk_id,
387
+ version=dpk_version)
388
+
389
+ assert isinstance(self._package, entities.Dpk)
390
+ except:
391
+ self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
392
+ fetch=None,
393
+ log_error=False)
394
+ assert isinstance(self._package, entities.Package)
395
+ return self._package
396
+
397
+ @property
398
+ def execution_url(self):
399
+ return 'CURL -X POST' \
400
+ '\nauthorization: Bearer <token>' \
401
+ '\nContent-Type: application/json" -d {' \
402
+ '\n"input": {<input json>}, ' \
403
+ '"projectId": "{<project_id>}", ' \
404
+ '"functionName": "<function_name>"}'
405
+
406
+ ################
407
+ # repositories #
408
+ ################
409
+ @_repositories.default
410
+ def set_repositories(self):
411
+ reps = namedtuple('repositories',
412
+ field_names=['executions', 'services', 'triggers'])
413
+
414
+ if self._package is None:
415
+ services_repo = repositories.Services(client_api=self._client_api,
416
+ package=self._package,
417
+ project=self._project)
418
+ else:
419
+ services_repo = self._package.services
420
+
421
+ triggers = repositories.Triggers(client_api=self._client_api,
422
+ project=self._project,
423
+ service=self)
424
+
425
+ r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
426
+ services=services_repo, triggers=triggers)
427
+ return r
428
+
429
+ @property
430
+ def executions(self):
431
+ assert isinstance(self._repositories.executions, repositories.Executions)
432
+ return self._repositories.executions
433
+
434
+ @property
435
+ def triggers(self):
436
+ assert isinstance(self._repositories.triggers, repositories.Triggers)
437
+ return self._repositories.triggers
438
+
439
+ @property
440
+ def services(self):
441
+ assert isinstance(self._repositories.services, repositories.Services)
442
+ return self._repositories.services
443
+
444
+    ###########
+    # methods #
+    ###########
+    def to_json(self):
+        """
+        Build the platform json representation of the service
+
+        :return: platform json of the service
+        :rtype: dict
+        """
+        _json = attr.asdict(
+            self,
+            filter=attr.filters.exclude(
+                attr.fields(Service)._project,
+                attr.fields(Service)._package,
+                attr.fields(Service)._revisions,
+                attr.fields(Service)._client_api,
+                attr.fields(Service)._repositories,
+                attr.fields(Service).project_id,
+                attr.fields(Service).init_input,
+                attr.fields(Service).module_name,
+                attr.fields(Service).bot,
+                attr.fields(Service).package_id,
+                attr.fields(Service).is_global,
+                attr.fields(Service).use_user_jwt,
+                attr.fields(Service).package_revision,
+                attr.fields(Service).driver_id,
+                attr.fields(Service).run_execution_as_process,
+                attr.fields(Service).execution_timeout,
+                attr.fields(Service).drain_time,
+                attr.fields(Service).runtime,
+                attr.fields(Service).queue_length_limit,
+                attr.fields(Service).max_attempts,
+                attr.fields(Service).on_reset,
+                attr.fields(Service).created_at,
+                attr.fields(Service).updated_at,
+                attr.fields(Service).secrets,
+                attr.fields(Service)._type,
+                attr.fields(Service).mode,
+                attr.fields(Service).metadata,
+                attr.fields(Service).archive,
+                attr.fields(Service).updated_by,
+                attr.fields(Service).config,
+                attr.fields(Service).settings,
+                attr.fields(Service).app,
+                attr.fields(Service).integrations,
+                attr.fields(Service).org_id,
+                attr.fields(Service).panels
+            )
+        )
+
+        _json['projectId'] = self.project_id
+        _json['orgId'] = self.org_id
+        _json['packageId'] = self.package_id
+        _json['initParams'] = self.init_input
+        _json['moduleName'] = self.module_name
+        _json['botUserName'] = self.bot
+        _json['useUserJwt'] = self.use_user_jwt
+        _json['global'] = self.is_global
+        _json['driverId'] = self.driver_id
+        _json['packageRevision'] = self.package_revision
+        _json['runExecutionAsProcess'] = self.run_execution_as_process
+        _json['executionTimeout'] = self.execution_timeout
+        _json['drainTime'] = self.drain_time
+        _json['onReset'] = self.on_reset
+        _json['createdAt'] = self.created_at
+        _json['updatedAt'] = self.updated_at
+
+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by
+
+        if self.panels is not None:
+            _json['panels'] = self.panels
+
+        if self.max_attempts is not None:
+            _json['maxAttempts'] = self.max_attempts
+
+        if self.is_global is not None:
+            _json['global'] = self.is_global
+
+        if self.runtime:
+            _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
+
+        if self.queue_length_limit is not None:
+            _json['queueLengthLimit'] = self.queue_length_limit
+
+        if self.secrets is not None:
+            _json['secrets'] = self.secrets
+
+        if self._type is not None:
+            _json['type'] = self._type
+
+        if self.mode:
+            _json['mode'] = self.mode
+
+        if self.metadata:
+            _json['metadata'] = self.metadata
+
+        if self.archive is not None:
+            _json['archive'] = self.archive
+
+        if self.config is not None:
+            _json['config'] = self.config
+
+        if self.settings is not None:
+            _json['settings'] = self.settings
+
+        if self.app is not None:
+            _json['app'] = self.app
+
+        if self.integrations is not None:
+            _json['integrations'] = self.integrations
+
+        return _json
+
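A short sketch of inspecting the serialized form produced above; which keys you read back is only illustrative, and the note about rebuilding entities is an assumption based on the surrounding `from_json` code.

.. code-block:: python

    payload = service.to_json()
    print(payload['packageId'], payload['global'])
    # the same dict shape is what the entity's from_json path consumes when services are rebuilt from the API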
+    def update(self, force=False):
+        """
+        Update the service changes to the platform
+
+        :param bool force: force update
+        :return: Service entity
+        :rtype: dtlpy.entities.service.Service
+        """
+        return self.services.update(service=self, force=force)
+
+    def delete(self, force: bool = False):
+        """
+        Delete the service
+
+        :param bool force: force delete
+        :return: True
+        :rtype: bool
+        """
+        return self.services.delete(service_id=self.id, force=force)
+
+    def status(self):
+        """
+        Get the service status
+
+        :return: status json
+        :rtype: dict
+        """
+        return self.services.status(service_id=self.id)
+
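For illustration, a hedged sketch of editing a field locally and pushing the change; the concurrency value is arbitrary, and the example assumes the service's runtime was materialized as a KubernetesRuntime entity rather than a plain dict.

.. code-block:: python

    # bump the execution concurrency and push the change, then check the rollout status
    service.runtime.concurrency = 5   # assumes a KubernetesRuntime entity, not a raw dict
    service = service.update()
    print(service.status())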
+    def log(self,
+            size=None,
+            checkpoint=None,
+            start=None,
+            end=None,
+            follow=False,
+            text=None,
+            execution_id=None,
+            function_name=None,
+            replica_id=None,
+            system=False,
+            view=True,
+            until_completed=True,
+            model_id: str = None,
+            model_operation: str = None,
+            ):
+        """
+        Get service logs
+
+        :param int size: size
+        :param dict checkpoint: the information from the last point checked in the service
+        :param str start: iso format time
+        :param str end: iso format time
+        :param bool follow: if true, keep streaming future logs
+        :param str text: text
+        :param str execution_id: execution id
+        :param str function_name: function name
+        :param str replica_id: replica id
+        :param bool system: system
+        :param bool view: if true, print out all the logs
+        :param bool until_completed: wait until completed
+        :param str model_id: model id
+        :param str model_operation: model operation action
+        :return: ServiceLog entity
+        :rtype: ServiceLog
+
+        **Example**:
+
+        .. code-block:: python
+
+            service_log = service.log()
+        """
+        return self.services.log(service=self,
+                                 size=size,
+                                 checkpoint=checkpoint,
+                                 start=start,
+                                 end=end,
+                                 follow=follow,
+                                 execution_id=execution_id,
+                                 function_name=function_name,
+                                 replica_id=replica_id,
+                                 system=system,
+                                 text=text,
+                                 view=view,
+                                 until_completed=until_completed,
+                                 model_id=model_id,
+                                 model_operation=model_operation)
+
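A hedged sketch of tailing the logs of one function; the function name is a placeholder, not taken from this diff.

.. code-block:: python

    # stream the logs of a single function until interrupted
    service.log(follow=True, function_name='run', system=False)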
+    def open_in_web(self):
+        """
+        Open the service in the web platform
+
+        :return:
+        """
+        parsed_url = urlsplit(self.platform_url)
+        base_url = parsed_url.scheme + "://" + parsed_url.netloc
+        url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
+        self._client_api._open_in_web(url=url)
+
+    def checkout(self):
+        """
+        Checkout - set this service as the default service in the local SDK state
+
+        :return:
+        """
+        return self.services.checkout(service=self)
+
+    def pause(self):
+        """
+        Pause the service
+
+        :return:
+        """
+        return self.services.pause(service_id=self.id)
+
+    def resume(self):
+        """
+        Resume the service
+
+        :return:
+        """
+        return self.services.resume(service_id=self.id)
+
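An illustrative maintenance-window sketch for the pause/resume pair above; the steps in between are placeholders.

.. code-block:: python

    # stop consuming executions, do maintenance, then resume
    service.pause()
    # ... redeploy resources, rotate secrets, etc. ...
    service.resume()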
+    def execute(
+            self,
+            execution_input=None,
+            function_name=None,
+            resource=None,
+            item_id=None,
+            dataset_id=None,
+            annotation_id=None,
+            project_id=None,
+            sync=False,
+            stream_logs=True,
+            return_output=True
+    ):
+        """
+        Execute a function on an existing service
+
+        :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
+        :param str function_name: function name to run
+        :param str resource: input type
+        :param str item_id: optional - item id as input to function
+        :param str dataset_id: optional - dataset id as input to function
+        :param str annotation_id: optional - annotation id as input to function
+        :param str project_id: resource's project
+        :param bool sync: if true, wait for the function to end
+        :param bool stream_logs: prints logs of the new execution; only works with sync=True
+        :param bool return_output: if True and sync is True - will return the output directly
+        :return: execution object
+        :rtype: dtlpy.entities.execution.Execution
+
+        **Example**:
+
+        .. code-block:: python
+
+            execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
+        """
+        execution = self.executions.create(sync=sync,
+                                           execution_input=execution_input,
+                                           function_name=function_name,
+                                           resource=resource,
+                                           item_id=item_id,
+                                           dataset_id=dataset_id,
+                                           annotation_id=annotation_id,
+                                           stream_logs=stream_logs,
+                                           project_id=project_id,
+                                           return_output=return_output)
+        return execution
+
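As a complement to the docstring example, a hedged sketch of passing explicit inputs and waiting synchronously; the function name, item id and project id are placeholders.

.. code-block:: python

    import dtlpy as dl

    output = service.execute(
        function_name='run',
        execution_input=[dl.FunctionIO(type=dl.PackageInputType.ITEM, value='<item_id>', name='item')],
        project_id='<project_id>',
        sync=True,            # wait for the execution to finish
        return_output=True)   # return the function output instead of the Execution entity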
+    def execute_batch(self,
+                      filters,
+                      function_name: str = None,
+                      execution_inputs: list = None,
+                      wait=True
+                      ):
+        """
+        Execute a function on an existing service for every entity matching the given filters
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
+
+        :param filters: Filters entity for filtering before execution
+        :param str function_name: function name to run
+        :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities that represent the extra inputs of the function
+        :param bool wait: wait until the create task finishes
+        :return: execution object
+        :rtype: dtlpy.entities.execution.Execution
+
+        **Example**:
+
+        .. code-block:: python
+
+            command = service.execute_batch(
+                execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
+                filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]}),
+                function_name='run')
+        """
+        execution = self.executions.create_batch(service_id=self.id,
+                                                 execution_inputs=execution_inputs,
+                                                 filters=filters,
+                                                 function_name=function_name,
+                                                 wait=wait)
+        return execution
+
+    def rerun_batch(self,
+                    filters,
+                    wait=True
+                    ):
+        """
+        Rerun executions on an existing service
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
+
+        :param filters: Filters entity for filtering before the rerun
+        :param bool wait: wait until the create task finishes
+        :return: rerun command
+        :rtype: dtlpy.entities.command.Command
+
+        **Example**:
+
+        .. code-block:: python
+
+            command = service.executions.rerun_batch(
+                filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
+        """
+        execution = self.executions.rerun_batch(service_id=self.id,
+                                                filters=filters,
+                                                wait=wait)
+        return execution
+
+    def activate_slots(
+            self,
+            project_id: str = None,
+            task_id: str = None,
+            dataset_id: str = None,
+            org_id: str = None,
+            user_email: str = None,
+            slots=None,
+            role=None,
+            prevent_override: bool = True,
+            visible: bool = True,
+            icon: str = 'fas fa-magic',
+            **kwargs
+    ) -> object:
+        """
+        Activate service slots
+
+        :param str project_id: project id
+        :param str task_id: task id
+        :param str dataset_id: dataset id
+        :param str org_id: org id
+        :param str user_email: user email
+        :param list slots: list of entities.PackageSlot
+        :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.OWNER, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
+        :param bool prevent_override: True to prevent override
+        :param bool visible: visible
+        :param str icon: icon
+        :param kwargs: all additional arguments
+        :return: list of user settings for the activated slots
+        :rtype: list
+
+        **Example**:
+
+        .. code-block:: python
+
+            setting = service.activate_slots(project_id='project_id',
+                                             slots=List[entities.PackageSlot],
+                                             icon='fas fa-magic')
+        """
+        return self.services.activate_slots(
+            service=self,
+            project_id=project_id,
+            task_id=task_id,
+            dataset_id=dataset_id,
+            org_id=org_id,
+            user_email=user_email,
+            slots=slots,
+            role=role,
+            prevent_override=prevent_override,
+            visible=visible,
+            icon=icon,
+            **kwargs
+        )
+
+    def restart(self, replica_name: str = None):
+        """
+        Restart service
+
+        :param str replica_name: replica name
+        :return: True
+        :rtype: bool
+        """
+        return self.services.restart(service=self, replica_name=replica_name)
+
+
+class KubernetesAutoscalerType(str, Enum):
+    """ The Service Autoscaler Type (RABBITMQ, CPU, RPS).
+
+    .. list-table::
+       :widths: 15 150
+       :header-rows: 1
+
+       * - State
+         - Description
+       * - RABBITMQ
+         - Service Autoscaler based on service queue length
+       * - CPU
+         - Service Autoscaler based on service CPU usage
+       * - RPS
+         - Service Autoscaler based on service RPS (requests per second)
+    """
+    RABBITMQ = 'rabbitmq'
+    CPU = 'cpu'
+    RPS = 'rps'
+
+
+# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
+class KubernetesAutuscalerTypeMeta(type):
+    def __getattribute__(cls, item):
+        if hasattr(KubernetesAutoscalerType, item):
+            return getattr(KubernetesAutoscalerType, item)
+        else:
+            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
+
+
+class KubernetesAutoscaler(entities.BaseEntity):
+    MIN_REPLICA_DEFAULT = 0
+    MAX_REPLICA_DEFAULT = 1
+    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
+
+    def __init__(self,
+                 autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
+                 min_replicas=MIN_REPLICA_DEFAULT,
+                 max_replicas=MAX_REPLICA_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        self.autoscaler_type = kwargs.get('type', autoscaler_type)
+        self.min_replicas = kwargs.get('minReplicas', min_replicas)
+        self.max_replicas = kwargs.get('maxReplicas', max_replicas)
+        self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
+        self.polling_interval = kwargs.get('pollingInterval', polling_interval)
+
+    def to_json(self):
+        _json = {
+            'type': self.autoscaler_type,
+            'minReplicas': self.min_replicas,
+            'maxReplicas': self.max_replicas
+        }
+
+        if self.cooldown_period is not None:
+            _json['cooldownPeriod'] = self.cooldown_period
+
+        if self.polling_interval is not None:
+            _json['pollingInterval'] = self.polling_interval
+
+        return _json
+
+
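As an editorial note on the camelCase fallbacks in the constructor above, a small sketch of building the entity straight from a platform-style dict; the values are arbitrary.

.. code-block:: python

    raw = {'type': 'cpu', 'minReplicas': 1, 'maxReplicas': 3, 'cooldownPeriod': 300}
    autoscaler = KubernetesAutoscaler(**raw)   # camelCase keys take precedence over the snake_case defaults
    assert autoscaler.to_json() == raw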
+class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
+    QUEUE_LENGTH_DEFAULT = 1000
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 queue_length=QUEUE_LENGTH_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.queue_length = kwargs.get('queueLength', queue_length)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['queueLength'] = self.queue_length
+        return _json
+
+
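For context, a hedged sketch of where this autoscaler is typically plugged in when configuring a service runtime; `dl.KubernetesRuntime` and the way it is attached to the service here are assumed from the SDK rather than taken from this diff, and the replica/queue values are illustrative.

.. code-block:: python

    import dtlpy as dl

    runtime = dl.KubernetesRuntime(
        autoscaler=dl.KubernetesRabbitmqAutoscaler(min_replicas=0,
                                                   max_replicas=2,
                                                   queue_length=10))
    # attach the runtime when updating (or deploying) a service
    service.runtime = runtime
    service.update()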
+class KubernetesRPSAutoscaler(KubernetesAutoscaler):
+    THRESHOLD_DEFAULT = 10
+    RATE_SECONDS_DEFAULT = 30
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 threshold=THRESHOLD_DEFAULT,
+                 rate_seconds=RATE_SECONDS_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RPS,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.threshold = kwargs.get('threshold', threshold)
+        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['rateSeconds'] = self.rate_seconds
+        _json['threshold'] = self.threshold
+        return _json
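Closing with a small serialization sketch for the RPS variant; the replica, threshold and rate values are illustrative.

.. code-block:: python

    import json

    rps = KubernetesRPSAutoscaler(min_replicas=1, max_replicas=4, threshold=20, rate_seconds=60)
    print(json.dumps(rps.to_json()))
    # emits the 'rps' type together with minReplicas/maxReplicas, rateSeconds and threshold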