dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +152 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +975 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +974 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1287 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1585 -1504
  167. dtlpy/repositories/downloader.py +1157 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +256 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +429 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1786 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.117.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
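Among the changes in this release, dtlpy/entities/service.py (diffed below) adds a DynamicConcurrencyUpdateMethod enum ('restart' / 'sync') and a matching concurrency_update_method field on KubernetesRuntime, serialized as concurrencyUpdateMethod. A minimal sketch of setting the new field, assuming the usual top-level dl.KubernetesRuntime and dl.InstanceCatalog exports and that a plain string value is accepted where the enum is expected:

import dtlpy as dl

# Sketch based on the 1.117.6 service.py diff below: KubernetesRuntime now
# stores concurrency_update_method and emits it from to_json() as
# 'concurrencyUpdateMethod' when set. The string values 'restart' / 'sync'
# mirror the new DynamicConcurrencyUpdateMethod enum.
runtime = dl.KubernetesRuntime(
    pod_type=dl.InstanceCatalog.REGULAR_S,
    concurrency=10,
    dynamic_concurrency=True,              # assumption: enabled as a boolean flag
    concurrency_update_method='sync',      # assumption: raw string accepted in place of the enum
)
print(runtime.to_json())  # includes 'concurrencyUpdateMethod' alongside 'dynamicConcurrency'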
dtlpy/entities/service.py CHANGED
@@ -1,963 +1,974 @@
1
- import warnings
2
- from collections import namedtuple
3
- from enum import Enum
4
- import traceback
5
- import logging
6
- from typing import List
7
- from urllib.parse import urlsplit
8
- import attr
9
- from .. import repositories, entities
10
- from ..services.api_client import ApiClient
11
-
12
- logger = logging.getLogger(name='dtlpy')
13
-
14
-
15
- class ServiceType(str, Enum):
16
- """ The type of the service (SYSTEM).
17
-
18
- .. list-table::
19
- :widths: 15 150
20
- :header-rows: 1
21
-
22
- * - State
23
- - Description
24
- * - SYSTEM
25
- - Dataloop internal service
26
- """
27
- SYSTEM = 'system'
28
- REGULAR = 'regular'
29
-
30
-
31
- class ServiceModeType(str, Enum):
32
- """ The type of the service mode.
33
-
34
- .. list-table::
35
- :widths: 15 150
36
- :header-rows: 1
37
-
38
- * - State
39
- - Description
40
- * - REGULAR
41
- - Service regular mode type
42
- * - DEBUG
43
- - Service debug mode type
44
- """
45
- REGULAR = 'regular'
46
- DEBUG = 'debug'
47
-
48
-
49
- class OnResetAction(str, Enum):
50
- """ The Execution action when the service reset (RERUN, FAILED).
51
-
52
- .. list-table::
53
- :widths: 15 150
54
- :header-rows: 1
55
-
56
- * - State
57
- - Description
58
- * - RERUN
59
- - When the service resting rerun the execution
60
- * - FAILED
61
- - When the service resting fail the execution
62
- """
63
- RERUN = 'rerun'
64
- FAILED = 'failed'
65
-
66
-
67
- class InstanceCatalog(str, Enum):
68
- """ The Service Pode size.
69
-
70
- .. list-table::
71
- :widths: 15 150
72
- :header-rows: 1
73
-
74
- * - State
75
- - Description
76
- * - REGULAR_XS
77
- - regular pod with extra small size
78
- * - REGULAR_S
79
- - regular pod with small size
80
- * - REGULAR_M
81
- - regular pod with medium size
82
- * - REGULAR_L
83
- - regular pod with large size
84
- * - HIGHMEM_XS
85
- - highmem pod with extra small size
86
- * - HIGHMEM_S
87
- - highmem pod with small size
88
- * - HIGHMEM_M
89
- - highmem pod with medium size
90
- * - HIGHMEM_L
91
- - highmem pod with large size
92
- * - GPU_T4_S
93
- - GPU NVIDIA T4 pod with regular memory
94
- * - GPU_T4_M
95
- - GPU NVIDIA T4 pod with highmem
96
- """
97
- REGULAR_XS = "regular-xs"
98
- REGULAR_S = "regular-s"
99
- REGULAR_M = "regular-m"
100
- REGULAR_L = "regular-l"
101
- HIGHMEM_XS = "highmem-xs"
102
- HIGHMEM_S = "highmem-s"
103
- HIGHMEM_M = "highmem-m"
104
- HIGHMEM_L = "highmem-l"
105
- GPU_T4_S = "gpu-t4"
106
- GPU_T4_M = "gpu-t4-m"
107
-
108
-
109
- class RuntimeType(str, Enum):
110
- """ Service culture Runtime (KUBERNETES).
111
-
112
- .. list-table::
113
- :widths: 15 150
114
- :header-rows: 1
115
-
116
- * - State
117
- - Description
118
- * - KUBERNETES
119
- - Service run in kubernetes culture
120
- """
121
- KUBERNETES = 'kubernetes'
122
-
123
-
124
- class ServiceRuntime(entities.BaseEntity):
125
- def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
126
- self.service_type = service_type
127
-
128
-
129
- class KubernetesRuntime(ServiceRuntime):
130
- DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
131
- DEFAULT_NUM_REPLICAS = 1
132
- DEFAULT_CONCURRENCY = 10
133
-
134
- def __init__(self,
135
- pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
136
- num_replicas=DEFAULT_NUM_REPLICAS,
137
- concurrency=DEFAULT_CONCURRENCY,
138
- dynamic_concurrency=None,
139
- runner_image=None,
140
- autoscaler=None,
141
- **kwargs):
142
-
143
- super().__init__(service_type=RuntimeType.KUBERNETES)
144
- self.pod_type = kwargs.get('podType', pod_type)
145
- self.num_replicas = kwargs.get('numReplicas', num_replicas)
146
- self.concurrency = kwargs.get('concurrency', concurrency)
147
- self.runner_image = kwargs.get('runnerImage', runner_image)
148
- self._proxy_image = kwargs.get('proxyImage', None)
149
- self.single_agent = kwargs.get('singleAgent', None)
150
- self.preemptible = kwargs.get('preemptible', None)
151
- self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
152
-
153
- self.autoscaler = kwargs.get('autoscaler', autoscaler)
154
- if self.autoscaler is not None and isinstance(self.autoscaler, dict):
155
- if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
156
- self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
157
- elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
158
- self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
159
- else:
160
- raise NotImplementedError(
161
- 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
162
-
163
- def to_json(self):
164
- _json = {
165
- 'podType': self.pod_type,
166
- 'numReplicas': self.num_replicas,
167
- 'concurrency': self.concurrency,
168
- 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
169
- }
170
-
171
- if self.single_agent is not None:
172
- _json['singleAgent'] = self.single_agent
173
-
174
- if self.runner_image is not None:
175
- _json['runnerImage'] = self.runner_image
176
-
177
- if self._proxy_image is not None:
178
- _json['proxyImage'] = self._proxy_image
179
-
180
- if self.preemptible is not None:
181
- _json['preemptible'] = self.preemptible
182
-
183
- if self.dynamic_concurrency is not None:
184
- _json['dynamicConcurrency'] = self.dynamic_concurrency
185
-
186
- return _json
187
-
188
-
189
- @attr.s
190
- class Service(entities.BaseEntity):
191
- """
192
- Service object
193
- """
194
- # platform
195
- created_at = attr.ib()
196
- updated_at = attr.ib(repr=False)
197
- creator = attr.ib()
198
- version = attr.ib()
199
-
200
- package_id = attr.ib()
201
- package_revision = attr.ib()
202
-
203
- bot = attr.ib()
204
- use_user_jwt = attr.ib(repr=False)
205
- init_input = attr.ib()
206
- versions = attr.ib(repr=False)
207
- module_name = attr.ib()
208
- name = attr.ib()
209
- url = attr.ib()
210
- id = attr.ib()
211
- active = attr.ib()
212
- driver_id = attr.ib(repr=False)
213
- secrets = attr.ib(repr=False)
214
-
215
- # name change
216
- runtime = attr.ib(repr=False, type=KubernetesRuntime)
217
- queue_length_limit = attr.ib()
218
- run_execution_as_process = attr.ib(type=bool)
219
- execution_timeout = attr.ib()
220
- drain_time = attr.ib()
221
- on_reset = attr.ib(type=OnResetAction)
222
- _type = attr.ib(type=ServiceType)
223
- project_id = attr.ib()
224
- org_id = attr.ib()
225
- is_global = attr.ib()
226
- max_attempts = attr.ib()
227
- mode = attr.ib(repr=False)
228
- metadata = attr.ib()
229
- archive = attr.ib(repr=False)
230
- config = attr.ib(repr=False)
231
- settings = attr.ib(repr=False)
232
- panels = attr.ib(repr=False)
233
-
234
- # SDK
235
- _package = attr.ib(repr=False)
236
- _client_api = attr.ib(type=ApiClient, repr=False)
237
- _revisions = attr.ib(default=None, repr=False)
238
- # repositories
239
- _project = attr.ib(default=None, repr=False)
240
- _repositories = attr.ib(repr=False)
241
- updated_by = attr.ib(default=None)
242
- app = attr.ib(default=None)
243
- integrations = attr.ib(default=None)
244
-
245
- @property
246
- def createdAt(self):
247
- return self.created_at
248
-
249
- @property
250
- def updatedAt(self):
251
- return self.updated_at
252
-
253
- @staticmethod
254
- def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
255
- """
256
- Same as from_json but with try-except to catch if error
257
-
258
- :param _json: platform json
259
- :param client_api: ApiClient entity
260
- :param package:
261
- :param project: project entity
262
- :param is_fetched: is Entity fetched from Platform
263
- :return:
264
- """
265
- try:
266
- service = Service.from_json(_json=_json,
267
- client_api=client_api,
268
- package=package,
269
- project=project,
270
- is_fetched=is_fetched)
271
- status = True
272
- except Exception:
273
- service = traceback.format_exc()
274
- status = False
275
- return status, service
276
-
277
- @classmethod
278
- def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
279
- """
280
- Build a service entity object from a json
281
-
282
- :param dict _json: platform json
283
- :param dl.ApiClient client_api: ApiClient entity
284
- :param dtlpy.entities.package.Package package: package entity
285
- :param dtlpy.entities.project.Project project: project entity
286
- :param bool is_fetched: is Entity fetched from Platform
287
- :return: service object
288
- :rtype: dtlpy.entities.service.Service
289
- """
290
- if project is not None:
291
- if project.id != _json.get('projectId', None):
292
- logger.warning('Service has been fetched from a project that is not belong to it')
293
- project = None
294
-
295
- if package is not None:
296
- if package.id != _json.get('packageId', None):
297
- logger.warning('Service has been fetched from a package that is not belong to it')
298
- package = None
299
-
300
- versions = _json.get('versions', dict())
301
- runtime = _json.get("runtime", None)
302
- if runtime:
303
- runtime = KubernetesRuntime(**runtime)
304
-
305
- inst = cls(
306
- package_revision=_json.get("packageRevision", None),
307
- bot=_json.get("botUserName", None),
308
- use_user_jwt=_json.get("useUserJwt", False),
309
- created_at=_json.get("createdAt", None),
310
- updated_at=_json.get("updatedAt", None),
311
- project_id=_json.get('projectId', None),
312
- package_id=_json.get('packageId', None),
313
- driver_id=_json.get('driverId', None),
314
- max_attempts=_json.get('maxAttempts', None),
315
- version=_json.get('version', None),
316
- creator=_json.get('creator', None),
317
- revisions=_json.get('revisions', None),
318
- queue_length_limit=_json.get('queueLengthLimit', None),
319
- active=_json.get('active', None),
320
- runtime=runtime,
321
- is_global=_json.get("global", False),
322
- init_input=_json.get("initParams", dict()),
323
- module_name=_json.get("moduleName", None),
324
- run_execution_as_process=_json.get('runExecutionAsProcess', False),
325
- execution_timeout=_json.get('executionTimeout', 60 * 60),
326
- drain_time=_json.get('drainTime', 60 * 10),
327
- on_reset=_json.get('onReset', OnResetAction.FAILED),
328
- name=_json.get("name", None),
329
- url=_json.get("url", None),
330
- id=_json.get("id", None),
331
- versions=versions,
332
- client_api=client_api,
333
- package=package,
334
- project=project,
335
- secrets=_json.get("secrets", None),
336
- type=_json.get("type", None),
337
- mode=_json.get('mode', dict()),
338
- metadata=_json.get('metadata', None),
339
- archive=_json.get('archive', None),
340
- updated_by=_json.get('updatedBy', None),
341
- config=_json.get('config', None),
342
- settings=_json.get('settings', None),
343
- app=_json.get('app', None),
344
- integrations=_json.get('integrations', None),
345
- org_id=_json.get('orgId', None),
346
- panels=_json.get('panels', None)
347
- )
348
- inst.is_fetched = is_fetched
349
- return inst
350
-
351
- ############
352
- # Entities #
353
- ############
354
- @property
355
- def revisions(self):
356
- if self._revisions is None:
357
- self._revisions = self.services.revisions(service=self)
358
- return self._revisions
359
-
360
- @property
361
- def platform_url(self):
362
- return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
363
-
364
- @property
365
- def project(self):
366
- if self._project is None:
367
- self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
368
- fetch=None)
369
- assert isinstance(self._project, entities.Project)
370
- return self._project
371
-
372
- @property
373
- def package(self):
374
- if self._package is None:
375
- try:
376
- dpk_id = None
377
- dpk_version = None
378
- if self.app and isinstance(self.app, dict):
379
- dpk_id = self.app.get('dpkId', None)
380
- dpk_version = self.app.get('dpkVersion', None)
381
- if dpk_id is None:
382
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
383
- dpk_id=self.package_id)
384
- else:
385
- self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
386
- dpk_id=dpk_id,
387
- version=dpk_version)
388
-
389
- assert isinstance(self._package, entities.Dpk)
390
- except:
391
- self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
392
- fetch=None,
393
- log_error=False)
394
- assert isinstance(self._package, entities.Package)
395
- return self._package
396
-
397
- @property
398
- def execution_url(self):
399
- return 'CURL -X POST' \
400
- '\nauthorization: Bearer <token>' \
401
- '\nContent-Type: application/json" -d {' \
402
- '\n"input": {<input json>}, ' \
403
- '"projectId": "{<project_id>}", ' \
404
- '"functionName": "<function_name>"}'
405
-
406
- ################
407
- # repositories #
408
- ################
409
- @_repositories.default
410
- def set_repositories(self):
411
- reps = namedtuple('repositories',
412
- field_names=['executions', 'services', 'triggers'])
413
-
414
- if self._package is None:
415
- services_repo = repositories.Services(client_api=self._client_api,
416
- package=self._package,
417
- project=self._project)
418
- else:
419
- services_repo = self._package.services
420
-
421
- triggers = repositories.Triggers(client_api=self._client_api,
422
- project=self._project,
423
- service=self)
424
-
425
- r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
426
- services=services_repo, triggers=triggers)
427
- return r
428
-
429
- @property
430
- def executions(self):
431
- assert isinstance(self._repositories.executions, repositories.Executions)
432
- return self._repositories.executions
433
-
434
- @property
435
- def triggers(self):
436
- assert isinstance(self._repositories.triggers, repositories.Triggers)
437
- return self._repositories.triggers
438
-
439
- @property
440
- def services(self):
441
- assert isinstance(self._repositories.services, repositories.Services)
442
- return self._repositories.services
443
-
444
- ###########
445
- # methods #
446
- ###########
447
- def to_json(self):
448
- """
449
- Returns platform _json format of object
450
-
451
- :return: platform json format of object
452
- :rtype: dict
453
- """
454
- _json = attr.asdict(
455
- self,
456
- filter=attr.filters.exclude(
457
- attr.fields(Service)._project,
458
- attr.fields(Service)._package,
459
- attr.fields(Service)._revisions,
460
- attr.fields(Service)._client_api,
461
- attr.fields(Service)._repositories,
462
- attr.fields(Service).project_id,
463
- attr.fields(Service).init_input,
464
- attr.fields(Service).module_name,
465
- attr.fields(Service).bot,
466
- attr.fields(Service).package_id,
467
- attr.fields(Service).is_global,
468
- attr.fields(Service).use_user_jwt,
469
- attr.fields(Service).package_revision,
470
- attr.fields(Service).driver_id,
471
- attr.fields(Service).run_execution_as_process,
472
- attr.fields(Service).execution_timeout,
473
- attr.fields(Service).drain_time,
474
- attr.fields(Service).runtime,
475
- attr.fields(Service).queue_length_limit,
476
- attr.fields(Service).max_attempts,
477
- attr.fields(Service).on_reset,
478
- attr.fields(Service).created_at,
479
- attr.fields(Service).updated_at,
480
- attr.fields(Service).secrets,
481
- attr.fields(Service)._type,
482
- attr.fields(Service).mode,
483
- attr.fields(Service).metadata,
484
- attr.fields(Service).archive,
485
- attr.fields(Service).updated_by,
486
- attr.fields(Service).config,
487
- attr.fields(Service).settings,
488
- attr.fields(Service).app,
489
- attr.fields(Service).integrations,
490
- attr.fields(Service).org_id,
491
- attr.fields(Service).panels
492
- )
493
- )
494
-
495
- _json['projectId'] = self.project_id
496
- _json['orgId'] = self.org_id
497
- _json['packageId'] = self.package_id
498
- _json['initParams'] = self.init_input
499
- _json['moduleName'] = self.module_name
500
- _json['botUserName'] = self.bot
501
- _json['useUserJwt'] = self.use_user_jwt
502
- _json['global'] = self.is_global
503
- _json['driverId'] = self.driver_id
504
- _json['packageRevision'] = self.package_revision
505
- _json['runExecutionAsProcess'] = self.run_execution_as_process
506
- _json['executionTimeout'] = self.execution_timeout
507
- _json['drainTime'] = self.drain_time
508
- _json['onReset'] = self.on_reset
509
- _json['createdAt'] = self.created_at
510
- _json['updatedAt'] = self.updated_at
511
-
512
- if self.updated_by is not None:
513
- _json['updatedBy'] = self.updated_by
514
-
515
- if self.panels is not None:
516
- _json['panels'] = self.panels
517
-
518
- if self.max_attempts is not None:
519
- _json['maxAttempts'] = self.max_attempts
520
-
521
- if self.is_global is not None:
522
- _json['global'] = self.is_global
523
-
524
- if self.runtime:
525
- _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
526
-
527
- if self.queue_length_limit is not None:
528
- _json['queueLengthLimit'] = self.queue_length_limit
529
-
530
- if self.secrets is not None:
531
- _json['secrets'] = self.secrets
532
-
533
- if self._type is not None:
534
- _json['type'] = self._type
535
-
536
- if self.mode:
537
- _json['mode'] = self.mode
538
-
539
- if self.metadata:
540
- _json['metadata'] = self.metadata
541
-
542
- if self.archive is not None:
543
- _json['archive'] = self.archive
544
-
545
- if self.config is not None:
546
- _json['config'] = self.config
547
-
548
- if self.settings is not None:
549
- _json['settings'] = self.settings
550
-
551
- if self.app is not None:
552
- _json['app'] = self.app
553
-
554
- if self.integrations is not None:
555
- _json['integrations'] = self.integrations
556
-
557
- return _json
558
-
559
- def update(self, force=False):
560
- """
561
- Update Service changes to platform
562
-
563
- :param bool force: force update
564
- :return: Service entity
565
- :rtype: dtlpy.entities.service.Service
566
- """
567
- return self.services.update(service=self, force=force)
568
-
569
- def delete(self, force: bool = False):
570
- """
571
- Delete Service object
572
-
573
- :return: True
574
- :rtype: bool
575
- """
576
- return self.services.delete(service_id=self.id, force=force)
577
-
578
- def status(self):
579
- """
580
- Get Service status
581
-
582
- :return: status json
583
- :rtype: dict
584
- """
585
- return self.services.status(service_id=self.id)
586
-
587
- def log(self,
588
- size=None,
589
- checkpoint=None,
590
- start=None,
591
- end=None,
592
- follow=False,
593
- text=None,
594
- execution_id=None,
595
- function_name=None,
596
- replica_id=None,
597
- system=False,
598
- view=True,
599
- until_completed=True,
600
- model_id: str = None,
601
- model_operation: str = None,
602
- ):
603
- """
604
- Get service logs
605
-
606
- :param int size: size
607
- :param dict checkpoint: the information from the lst point checked in the service
608
- :param str start: iso format time
609
- :param str end: iso format time
610
- :param bool follow: if true, keep stream future logs
611
- :param str text: text
612
- :param str execution_id: execution id
613
- :param str function_name: function name
614
- :param str replica_id: replica id
615
- :param bool system: system
616
- :param bool view: if true, print out all the logs
617
- :param bool until_completed: wait until completed
618
- :param str model_id: model id
619
- :param str model_operation: model operation action
620
- :return: ServiceLog entity
621
- :rtype: ServiceLog
622
-
623
- **Example**:
624
-
625
- .. code-block:: python
626
-
627
- service_log = service.log()
628
- """
629
- return self.services.log(service=self,
630
- size=size,
631
- checkpoint=checkpoint,
632
- start=start,
633
- end=end,
634
- follow=follow,
635
- execution_id=execution_id,
636
- function_name=function_name,
637
- replica_id=replica_id,
638
- system=system,
639
- text=text,
640
- view=view,
641
- until_completed=until_completed,
642
- model_id=model_id,
643
- model_operation=model_operation)
644
-
645
- def open_in_web(self):
646
- """
647
- Open the service in web platform
648
-
649
- :return:
650
- """
651
- parsed_url = urlsplit(self.platform_url)
652
- base_url = parsed_url.scheme + "://" + parsed_url.netloc
653
- url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
654
- self._client_api._open_in_web(url=url)
655
-
656
- def checkout(self):
657
- """
658
- Checkout
659
-
660
- :return:
661
- """
662
- return self.services.checkout(service=self)
663
-
664
- def pause(self):
665
- """
666
- pause
667
-
668
- :return:
669
- """
670
- return self.services.pause(service_id=self.id)
671
-
672
- def resume(self):
673
- """
674
- resume
675
-
676
- :return:
677
- """
678
- return self.services.resume(service_id=self.id)
679
-
680
- def execute(
681
- self,
682
- execution_input=None,
683
- function_name=None,
684
- resource=None,
685
- item_id=None,
686
- dataset_id=None,
687
- annotation_id=None,
688
- project_id=None,
689
- sync=False,
690
- stream_logs=True,
691
- return_output=True
692
- ):
693
- """
694
- Execute a function on an existing service
695
-
696
- :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
697
- :param str function_name: function name to run
698
- :param str resource: input type.
699
- :param str item_id: optional - item id as input to function
700
- :param str dataset_id: optional - dataset id as input to function
701
- :param str annotation_id: optional - annotation id as input to function
702
- :param str project_id: resource's project
703
- :param bool sync: if true, wait for function to end
704
- :param bool stream_logs: prints logs of the new execution. only works with sync=True
705
- :param bool return_output: if True and sync is True - will return the output directly
706
- :return: execution object
707
- :rtype: dtlpy.entities.execution.Execution
708
-
709
- **Example**:
710
-
711
- .. code-block:: python
712
-
713
- execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
714
- """
715
- execution = self.executions.create(sync=sync,
716
- execution_input=execution_input,
717
- function_name=function_name,
718
- resource=resource,
719
- item_id=item_id,
720
- dataset_id=dataset_id,
721
- annotation_id=annotation_id,
722
- stream_logs=stream_logs,
723
- project_id=project_id,
724
- return_output=return_output)
725
- return execution
726
-
727
- def execute_batch(self,
728
- filters,
729
- function_name: str = None,
730
- execution_inputs: list = None,
731
- wait=True
732
- ):
733
- """
734
- Execute a function on an existing service
735
-
736
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
737
-
738
- :param filters: Filters entity for a filtering before execute
739
- :param str function_name: function name to run
740
- :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
741
- :param bool wait: wait until create task finish
742
- :return: execution object
743
- :rtype: dtlpy.entities.execution.Execution
744
-
745
- **Example**:
746
-
747
- .. code-block:: python
748
-
749
- command = service.execute_batch(
750
- execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
751
- filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]),
752
- function_name='run')
753
- """
754
- execution = self.executions.create_batch(service_id=self.id,
755
- execution_inputs=execution_inputs,
756
- filters=filters,
757
- function_name=function_name,
758
- wait=wait)
759
- return execution
760
-
761
- def rerun_batch(self,
762
- filters,
763
- wait=True
764
- ):
765
- """
766
- rerun a executions on an existing service
767
-
768
- **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
769
-
770
- :param filters: Filters entity for a filtering before rerun
771
- :param bool wait: wait until create task finish
772
- :return: rerun command
773
- :rtype: dtlpy.entities.command.Command
774
-
775
- **Example**:
776
-
777
- .. code-block:: python
778
-
779
- command = service.executions.rerun_batch(
780
- filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
781
- """
782
- execution = self.executions.rerun_batch(service_id=self.id,
783
- filters=filters,
784
- wait=wait)
785
- return execution
786
-
787
- def activate_slots(
788
- self,
789
- project_id: str = None,
790
- task_id: str = None,
791
- dataset_id: str = None,
792
- org_id: str = None,
793
- user_email: str = None,
794
- slots=None,
795
- role=None,
796
- prevent_override: bool = True,
797
- visible: bool = True,
798
- icon: str = 'fas fa-magic',
799
- **kwargs
800
- ) -> object:
801
- """
802
- Activate service slots
803
-
804
- :param str project_id: project id
805
- :param str task_id: task id
806
- :param str dataset_id: dataset id
807
- :param str org_id: org id
808
- :param str user_email: user email
809
- :param list slots: list of entities.PackageSlot
810
- :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.owner, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
811
- :param bool prevent_override: True to prevent override
812
- :param bool visible: visible
813
- :param str icon: icon
814
- :param kwargs: all additional arguments
815
- :return: list of user setting for activated slots
816
- :rtype: list
817
-
818
- **Example**:
819
-
820
- .. code-block:: python
821
-
822
- setting = service.activate_slots(project_id='project_id',
823
- slots=List[entities.PackageSlot],
824
- icon='fas fa-magic')
825
- """
826
- return self.services.activate_slots(
827
- service=self,
828
- project_id=project_id,
829
- task_id=task_id,
830
- dataset_id=dataset_id,
831
- org_id=org_id,
832
- user_email=user_email,
833
- slots=slots,
834
- role=role,
835
- prevent_override=prevent_override,
836
- visible=visible,
837
- icon=icon,
838
- **kwargs
839
- )
840
-
841
- def restart(self, replica_name: str = None):
842
- """
843
- Restart service
844
-
845
- :param str replica_name: replica name
846
- :return: True
847
- :rtype: bool
848
- """
849
- return self.services.restart(service=self, replica_name=replica_name)
850
-
851
-
852
- class KubernetesAutoscalerType(str, Enum):
853
- """ The Service Autoscaler Type (RABBITMQ, CPU).
854
-
855
- .. list-table::
856
- :widths: 15 150
857
- :header-rows: 1
858
-
859
- * - State
860
- - Description
861
- * - RABBITMQ
862
- - Service Autoscaler based on service queue length
863
- * - CPU
864
- - Service Autoscaler based on service CPU usage
865
- * - RPS
866
- - Service Autoscaler based on service RPS
867
- """
868
- RABBITMQ = 'rabbitmq'
869
- CPU = 'cpu'
870
- RPS = 'rps'
871
-
872
-
873
- # added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
874
- class KubernetesAutuscalerTypeMeta(type):
875
- def __getattribute__(cls, item):
876
- if hasattr(KubernetesAutoscalerType, item):
877
- return getattr(KubernetesAutoscalerType, item)
878
- else:
879
- raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
880
-
881
-
882
- class KubernetesAutoscaler(entities.BaseEntity):
883
- MIN_REPLICA_DEFAULT = 0
884
- MAX_REPLICA_DEFAULT = 1
885
- AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
886
-
887
- def __init__(self,
888
- autoscaler_type: KubernetesAutoscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
889
- min_replicas=MIN_REPLICA_DEFAULT,
890
- max_replicas=MAX_REPLICA_DEFAULT,
891
- cooldown_period=None,
892
- polling_interval=None,
893
- **kwargs):
894
- self.autoscaler_type = kwargs.get('type', autoscaler_type)
895
- self.min_replicas = kwargs.get('minReplicas', min_replicas)
896
- self.max_replicas = kwargs.get('maxReplicas', max_replicas)
897
- self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
898
- self.polling_interval = kwargs.get('pollingInterval', polling_interval)
899
-
900
- def to_json(self):
901
- _json = {
902
- 'type': self.autoscaler_type,
903
- 'minReplicas': self.min_replicas,
904
- 'maxReplicas': self.max_replicas
905
- }
906
-
907
- if self.cooldown_period is not None:
908
- _json['cooldownPeriod'] = self.cooldown_period
909
-
910
- if self.polling_interval is not None:
911
- _json['pollingInterval'] = self.polling_interval
912
-
913
- return _json
914
-
915
-
916
- class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
917
- QUEUE_LENGTH_DEFAULT = 1000
918
-
919
- def __init__(self,
920
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
921
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
922
- queue_length=QUEUE_LENGTH_DEFAULT,
923
- cooldown_period=None,
924
- polling_interval=None,
925
- **kwargs):
926
- super().__init__(min_replicas=min_replicas,
927
- max_replicas=max_replicas,
928
- autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
929
- cooldown_period=cooldown_period,
930
- polling_interval=polling_interval, **kwargs)
931
- self.queue_length = kwargs.get('queueLength', queue_length)
932
-
933
- def to_json(self):
934
- _json = super().to_json()
935
- _json['queueLength'] = self.queue_length
936
- return _json
937
-
938
-
939
- class KubernetesRPSAutoscaler(KubernetesAutoscaler):
940
- THRESHOLD_DEFAULT = 10
941
- RATE_SECONDS_DEFAULT = 30
942
-
943
- def __init__(self,
944
- min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
945
- max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
946
- threshold=THRESHOLD_DEFAULT,
947
- rate_seconds=RATE_SECONDS_DEFAULT,
948
- cooldown_period=None,
949
- polling_interval=None,
950
- **kwargs):
951
- super().__init__(min_replicas=min_replicas,
952
- max_replicas=max_replicas,
953
- autoscaler_type=KubernetesAutoscalerType.RPS,
954
- cooldown_period=cooldown_period,
955
- polling_interval=polling_interval, **kwargs)
956
- self.threshold = kwargs.get('threshold', threshold)
957
- self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
958
-
959
- def to_json(self):
960
- _json = super().to_json()
961
- _json['rateSeconds'] = self.rate_seconds
962
- _json['threshold'] = self.threshold
963
- return _json
1
+ import warnings
2
+ from collections import namedtuple
3
+ from enum import Enum
4
+ import traceback
5
+ import logging
6
+ from typing import List
7
+ from urllib.parse import urlsplit
8
+ import attr
9
+ from .. import repositories, entities
10
+ from ..services.api_client import ApiClient
11
+
12
+ logger = logging.getLogger(name='dtlpy')
13
+
14
+ class DynamicConcurrencyUpdateMethod(str, Enum):
15
+ """ The method of updating the dynamic concurrency.
16
+ """
17
+ RESTART = 'restart',
18
+ SYNC = 'sync'
19
+
20
+
21
+ class ServiceType(str, Enum):
22
+ """ The type of the service (SYSTEM).
23
+
24
+ .. list-table::
25
+ :widths: 15 150
26
+ :header-rows: 1
27
+
28
+ * - State
29
+ - Description
30
+ * - SYSTEM
31
+ - Dataloop internal service
32
+ """
33
+ SYSTEM = 'system'
34
+ REGULAR = 'regular'
35
+
36
+
37
+ class ServiceModeType(str, Enum):
38
+ """ The type of the service mode.
39
+
40
+ .. list-table::
41
+ :widths: 15 150
42
+ :header-rows: 1
43
+
44
+ * - State
45
+ - Description
46
+ * - REGULAR
47
+ - Service regular mode type
48
+ * - DEBUG
49
+ - Service debug mode type
50
+ """
51
+ REGULAR = 'regular'
52
+ DEBUG = 'debug'
53
+
54
+
55
+ class OnResetAction(str, Enum):
56
+ """ The Execution action when the service reset (RERUN, FAILED).
57
+
58
+ .. list-table::
59
+ :widths: 15 150
60
+ :header-rows: 1
61
+
62
+ * - State
63
+ - Description
64
+ * - RERUN
65
+ - When the service resting rerun the execution
66
+ * - FAILED
67
+ - When the service resting fail the execution
68
+ """
69
+ RERUN = 'rerun'
70
+ FAILED = 'failed'
71
+
72
+
73
+ class InstanceCatalog(str, Enum):
74
+ """ The Service Pode size.
75
+
76
+ .. list-table::
77
+ :widths: 15 150
78
+ :header-rows: 1
79
+
80
+ * - State
81
+ - Description
82
+ * - REGULAR_XS
83
+ - regular pod with extra small size
84
+ * - REGULAR_S
85
+ - regular pod with small size
86
+ * - REGULAR_M
87
+ - regular pod with medium size
88
+ * - REGULAR_L
89
+ - regular pod with large size
90
+ * - HIGHMEM_XS
91
+ - highmem pod with extra small size
92
+ * - HIGHMEM_S
93
+ - highmem pod with small size
94
+ * - HIGHMEM_M
95
+ - highmem pod with medium size
96
+ * - HIGHMEM_L
97
+ - highmem pod with large size
98
+ * - GPU_T4_S
99
+ - GPU NVIDIA T4 pod with regular memory
100
+ * - GPU_T4_M
101
+ - GPU NVIDIA T4 pod with highmem
102
+ """
103
+ REGULAR_XS = "regular-xs"
104
+ REGULAR_S = "regular-s"
105
+ REGULAR_M = "regular-m"
106
+ REGULAR_L = "regular-l"
107
+ HIGHMEM_XS = "highmem-xs"
108
+ HIGHMEM_S = "highmem-s"
109
+ HIGHMEM_M = "highmem-m"
110
+ HIGHMEM_L = "highmem-l"
111
+ GPU_T4_S = "gpu-t4"
112
+ GPU_T4_M = "gpu-t4-m"
113
+
114
+
115
+ class RuntimeType(str, Enum):
116
+ """ Service culture Runtime (KUBERNETES).
117
+
118
+ .. list-table::
119
+ :widths: 15 150
120
+ :header-rows: 1
121
+
122
+ * - State
123
+ - Description
124
+ * - KUBERNETES
125
+ - Service run in kubernetes culture
126
+ """
127
+ KUBERNETES = 'kubernetes'
128
+
129
+
130
+ class ServiceRuntime(entities.BaseEntity):
131
+ def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
132
+ self.service_type = service_type
133
+
134
+
135
+ class KubernetesRuntime(ServiceRuntime):
136
+ DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
137
+ DEFAULT_NUM_REPLICAS = 1
138
+ DEFAULT_CONCURRENCY = 10
139
+
140
+ def __init__(self,
141
+ pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
142
+ num_replicas=DEFAULT_NUM_REPLICAS,
143
+ concurrency=DEFAULT_CONCURRENCY,
144
+ dynamic_concurrency=None,
145
+ concurrency_update_method=None,
146
+ runner_image=None,
147
+ autoscaler=None,
148
+ **kwargs):
149
+
150
+ super().__init__(service_type=RuntimeType.KUBERNETES)
151
+ self.pod_type = kwargs.get('podType', pod_type)
152
+ self.num_replicas = kwargs.get('numReplicas', num_replicas)
153
+ self.concurrency = kwargs.get('concurrency', concurrency)
154
+ self.runner_image = kwargs.get('runnerImage', runner_image)
155
+ self._proxy_image = kwargs.get('proxyImage', None)
156
+ self.single_agent = kwargs.get('singleAgent', None)
157
+ self.preemptible = kwargs.get('preemptible', None)
158
+ self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
159
+ self.concurrency_update_method = kwargs.get('concurrencyUpdateMethod', concurrency_update_method)
160
+
161
+ self.autoscaler = kwargs.get('autoscaler', autoscaler)
162
+ if self.autoscaler is not None and isinstance(self.autoscaler, dict):
163
+ if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
164
+ self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
165
+ elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
166
+ self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
167
+ else:
168
+ raise NotImplementedError(
169
+ 'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
170
+
171
+ def to_json(self):
172
+ _json = {
173
+ 'podType': self.pod_type,
174
+ 'numReplicas': self.num_replicas,
175
+ 'concurrency': self.concurrency,
176
+ 'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
177
+ }
178
+
179
+ if self.single_agent is not None:
180
+ _json['singleAgent'] = self.single_agent
181
+
182
+ if self.runner_image is not None:
183
+ _json['runnerImage'] = self.runner_image
184
+
185
+ if self._proxy_image is not None:
186
+ _json['proxyImage'] = self._proxy_image
187
+
188
+ if self.preemptible is not None:
189
+ _json['preemptible'] = self.preemptible
190
+
191
+ if self.dynamic_concurrency is not None:
192
+ _json['dynamicConcurrency'] = self.dynamic_concurrency
193
+
194
+ if self.concurrency_update_method is not None:
195
+ _json['concurrencyUpdateMethod'] = self.concurrency_update_method
196
+
197
+ return _json
198
+
199
+
200
+ @attr.s
201
+ class Service(entities.BaseEntity):
202
+ """
203
+ Service object
204
+ """
205
+ # platform
206
+ created_at = attr.ib()
207
+ updated_at = attr.ib(repr=False)
208
+ creator = attr.ib()
209
+ version = attr.ib()
210
+
211
+ package_id = attr.ib()
212
+ package_revision = attr.ib()
213
+
214
+ bot = attr.ib()
215
+ use_user_jwt = attr.ib(repr=False)
216
+ init_input = attr.ib()
217
+ versions = attr.ib(repr=False)
218
+ module_name = attr.ib()
219
+ name = attr.ib()
220
+ url = attr.ib()
221
+ id = attr.ib()
222
+ active = attr.ib()
223
+ driver_id = attr.ib(repr=False)
224
+ secrets = attr.ib(repr=False)
225
+
226
+ # name change
227
+ runtime = attr.ib(repr=False, type=KubernetesRuntime)
228
+ queue_length_limit = attr.ib()
229
+ run_execution_as_process = attr.ib(type=bool)
230
+ execution_timeout = attr.ib()
231
+ drain_time = attr.ib()
232
+ on_reset = attr.ib(type=OnResetAction)
233
+ _type = attr.ib(type=ServiceType)
234
+ project_id = attr.ib()
235
+ org_id = attr.ib()
236
+ is_global = attr.ib()
237
+ max_attempts = attr.ib()
238
+ mode = attr.ib(repr=False)
239
+ metadata = attr.ib()
240
+ archive = attr.ib(repr=False)
241
+ config = attr.ib(repr=False)
242
+ settings = attr.ib(repr=False)
243
+ panels = attr.ib(repr=False)
244
+
245
+ # SDK
246
+ _package = attr.ib(repr=False)
247
+ _client_api = attr.ib(type=ApiClient, repr=False)
248
+ _revisions = attr.ib(default=None, repr=False)
249
+ # repositories
250
+ _project = attr.ib(default=None, repr=False)
251
+ _repositories = attr.ib(repr=False)
252
+ updated_by = attr.ib(default=None)
253
+ app = attr.ib(default=None)
254
+ integrations = attr.ib(default=None)
255
+
256
+ @property
257
+ def createdAt(self):
258
+ return self.created_at
259
+
260
+ @property
261
+ def updatedAt(self):
262
+ return self.updated_at
263
+
264
+ @staticmethod
265
+ def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
266
+ """
267
+ Same as from_json but with try-except to catch if error
268
+
269
+ :param _json: platform json
270
+ :param client_api: ApiClient entity
271
+ :param package:
272
+ :param project: project entity
273
+ :param is_fetched: is Entity fetched from Platform
274
+ :return:
275
+ """
276
+ try:
277
+ service = Service.from_json(_json=_json,
278
+ client_api=client_api,
279
+ package=package,
280
+ project=project,
281
+ is_fetched=is_fetched)
282
+ status = True
283
+ except Exception:
284
+ service = traceback.format_exc()
285
+ status = False
286
+ return status, service
287
+
288
+ @classmethod
289
+ def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
290
+ """
291
+ Build a service entity object from a json
292
+
293
+ :param dict _json: platform json
294
+ :param dl.ApiClient client_api: ApiClient entity
295
+ :param dtlpy.entities.package.Package package: package entity
296
+ :param dtlpy.entities.project.Project project: project entity
297
+ :param bool is_fetched: is Entity fetched from Platform
298
+ :return: service object
299
+ :rtype: dtlpy.entities.service.Service
300
+ """
301
+ if project is not None:
302
+ if project.id != _json.get('projectId', None):
303
+ logger.warning('Service has been fetched from a project that is not belong to it')
304
+ project = None
305
+
306
+ if package is not None:
307
+ if package.id != _json.get('packageId', None):
308
+ logger.warning('Service has been fetched from a package that is not belong to it')
309
+ package = None
310
+
311
+ versions = _json.get('versions', dict())
312
+ runtime = _json.get("runtime", None)
313
+ if runtime:
314
+ runtime = KubernetesRuntime(**runtime)
315
+
316
+ inst = cls(
317
+ package_revision=_json.get("packageRevision", None),
318
+ bot=_json.get("botUserName", None),
319
+ use_user_jwt=_json.get("useUserJwt", False),
320
+ created_at=_json.get("createdAt", None),
321
+ updated_at=_json.get("updatedAt", None),
322
+ project_id=_json.get('projectId', None),
323
+ package_id=_json.get('packageId', None),
324
+ driver_id=_json.get('driverId', None),
325
+ max_attempts=_json.get('maxAttempts', None),
326
+ version=_json.get('version', None),
327
+ creator=_json.get('creator', None),
328
+ revisions=_json.get('revisions', None),
329
+ queue_length_limit=_json.get('queueLengthLimit', None),
330
+ active=_json.get('active', None),
331
+ runtime=runtime,
332
+ is_global=_json.get("global", False),
333
+ init_input=_json.get("initParams", dict()),
334
+ module_name=_json.get("moduleName", None),
335
+ run_execution_as_process=_json.get('runExecutionAsProcess', False),
336
+ execution_timeout=_json.get('executionTimeout', 60 * 60),
337
+ drain_time=_json.get('drainTime', 60 * 10),
338
+ on_reset=_json.get('onReset', OnResetAction.FAILED),
339
+ name=_json.get("name", None),
340
+ url=_json.get("url", None),
341
+ id=_json.get("id", None),
342
+ versions=versions,
343
+ client_api=client_api,
344
+ package=package,
345
+ project=project,
346
+ secrets=_json.get("secrets", None),
347
+ type=_json.get("type", None),
348
+ mode=_json.get('mode', dict()),
349
+ metadata=_json.get('metadata', None),
350
+ archive=_json.get('archive', None),
351
+ updated_by=_json.get('updatedBy', None),
352
+ config=_json.get('config', None),
353
+ settings=_json.get('settings', None),
354
+ app=_json.get('app', None),
355
+ integrations=_json.get('integrations', None),
356
+ org_id=_json.get('orgId', None),
357
+ panels=_json.get('panels', None)
358
+ )
359
+ inst.is_fetched = is_fetched
360
+ return inst
361
+
362
+ ############
363
+ # Entities #
364
+ ############
365
+ @property
366
+ def revisions(self):
367
+ if self._revisions is None:
368
+ self._revisions = self.services.revisions(service=self)
369
+ return self._revisions
370
+
371
+ @property
372
+ def platform_url(self):
373
+ return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
374
+
375
+ @property
376
+ def project(self):
377
+ if self._project is None:
378
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
379
+ fetch=None)
380
+ assert isinstance(self._project, entities.Project)
381
+ return self._project
382
+
383
+ @property
384
+ def package(self):
385
+ if self._package is None:
386
+ try:
387
+ dpk_id = None
388
+ dpk_version = None
389
+ if self.app and isinstance(self.app, dict):
390
+ dpk_id = self.app.get('dpkId', None)
391
+ dpk_version = self.app.get('dpkVersion', None)
392
+ if dpk_id is None:
393
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
394
+ dpk_id=self.package_id)
395
+ else:
396
+ self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
397
+ dpk_id=dpk_id,
398
+ version=dpk_version)
399
+
400
+ assert isinstance(self._package, entities.Dpk)
401
+ except:
402
+ self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
403
+ fetch=None,
404
+ log_error=False)
405
+ assert isinstance(self._package, entities.Package)
406
+ return self._package
407
+
408
+ @property
409
+ def execution_url(self):
410
+ return 'CURL -X POST' \
411
+ '\nauthorization: Bearer <token>' \
412
+ '\nContent-Type: application/json" -d {' \
413
+ '\n"input": {<input json>}, ' \
414
+ '"projectId": "{<project_id>}", ' \
415
+ '"functionName": "<function_name>"}'
416
+
417
+    ################
+    # repositories #
+    ################
+    @_repositories.default
+    def set_repositories(self):
+        reps = namedtuple('repositories',
+                          field_names=['executions', 'services', 'triggers'])
+
+        if self._package is None:
+            services_repo = repositories.Services(client_api=self._client_api,
+                                                  package=self._package,
+                                                  project=self._project)
+        else:
+            services_repo = self._package.services
+
+        triggers = repositories.Triggers(client_api=self._client_api,
+                                         project=self._project,
+                                         service=self)
+
+        r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
+                 services=services_repo, triggers=triggers)
+        return r
+
+    @property
+    def executions(self):
+        assert isinstance(self._repositories.executions, repositories.Executions)
+        return self._repositories.executions
+
+    @property
+    def triggers(self):
+        assert isinstance(self._repositories.triggers, repositories.Triggers)
+        return self._repositories.triggers
+
+    @property
+    def services(self):
+        assert isinstance(self._repositories.services, repositories.Services)
+        return self._repositories.services
+
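Each repository property exposes the corresponding operations scoped to this service; an illustrative sketch (the list() and all() calls are the standard dtlpy repository/paging methods, assumed here and not part of this diff):

.. code-block:: python

    # list executions and triggers bound to this service
    executions_pages = service.executions.list()
    triggers_pages = service.triggers.list()
    for trigger in triggers_pages.all():
        print(trigger.name)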
+    ###########
+    # methods #
+    ###########
+    def to_json(self):
+        """
+        Returns platform _json format of object
+
+        :return: platform json format of object
+        :rtype: dict
+        """
+        _json = attr.asdict(
+            self,
+            filter=attr.filters.exclude(
+                attr.fields(Service)._project,
+                attr.fields(Service)._package,
+                attr.fields(Service)._revisions,
+                attr.fields(Service)._client_api,
+                attr.fields(Service)._repositories,
+                attr.fields(Service).project_id,
+                attr.fields(Service).init_input,
+                attr.fields(Service).module_name,
+                attr.fields(Service).bot,
+                attr.fields(Service).package_id,
+                attr.fields(Service).is_global,
+                attr.fields(Service).use_user_jwt,
+                attr.fields(Service).package_revision,
+                attr.fields(Service).driver_id,
+                attr.fields(Service).run_execution_as_process,
+                attr.fields(Service).execution_timeout,
+                attr.fields(Service).drain_time,
+                attr.fields(Service).runtime,
+                attr.fields(Service).queue_length_limit,
+                attr.fields(Service).max_attempts,
+                attr.fields(Service).on_reset,
+                attr.fields(Service).created_at,
+                attr.fields(Service).updated_at,
+                attr.fields(Service).secrets,
+                attr.fields(Service)._type,
+                attr.fields(Service).mode,
+                attr.fields(Service).metadata,
+                attr.fields(Service).archive,
+                attr.fields(Service).updated_by,
+                attr.fields(Service).config,
+                attr.fields(Service).settings,
+                attr.fields(Service).app,
+                attr.fields(Service).integrations,
+                attr.fields(Service).org_id,
+                attr.fields(Service).panels
+            )
+        )
+
+        _json['projectId'] = self.project_id
+        _json['orgId'] = self.org_id
+        _json['packageId'] = self.package_id
+        _json['initParams'] = self.init_input
+        _json['moduleName'] = self.module_name
+        _json['botUserName'] = self.bot
+        _json['useUserJwt'] = self.use_user_jwt
+        _json['global'] = self.is_global
+        _json['driverId'] = self.driver_id
+        _json['packageRevision'] = self.package_revision
+        _json['runExecutionAsProcess'] = self.run_execution_as_process
+        _json['executionTimeout'] = self.execution_timeout
+        _json['drainTime'] = self.drain_time
+        _json['onReset'] = self.on_reset
+        _json['createdAt'] = self.created_at
+        _json['updatedAt'] = self.updated_at
+
+        if self.updated_by is not None:
+            _json['updatedBy'] = self.updated_by
+
+        if self.panels is not None:
+            _json['panels'] = self.panels
+
+        if self.max_attempts is not None:
+            _json['maxAttempts'] = self.max_attempts
+
+        if self.is_global is not None:
+            _json['global'] = self.is_global
+
+        if self.runtime:
+            _json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
+
+        if self.queue_length_limit is not None:
+            _json['queueLengthLimit'] = self.queue_length_limit
+
+        if self.secrets is not None:
+            _json['secrets'] = self.secrets
+
+        if self._type is not None:
+            _json['type'] = self._type
+
+        if self.mode:
+            _json['mode'] = self.mode
+
+        if self.metadata:
+            _json['metadata'] = self.metadata
+
+        if self.archive is not None:
+            _json['archive'] = self.archive
+
+        if self.config is not None:
+            _json['config'] = self.config
+
+        if self.settings is not None:
+            _json['settings'] = self.settings
+
+        if self.app is not None:
+            _json['app'] = self.app
+
+        if self.integrations is not None:
+            _json['integrations'] = self.integrations
+
+        return _json
+
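to_json maps the snake_case attributes back to the platform's camelCase keys; a short illustrative check against the mapping shown above:

.. code-block:: python

    payload = service.to_json()
    # snake_case attributes are exported under their platform (camelCase) names
    assert payload['packageId'] == service.package_id
    assert payload['projectId'] == service.project_id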
+    def update(self, force=False):
+        """
+        Update Service changes to platform
+
+        :param bool force: force update
+        :return: Service entity
+        :rtype: dtlpy.entities.service.Service
+        """
+        return self.services.update(service=self, force=force)
+
+    def delete(self, force: bool = False):
+        """
+        Delete Service object
+
+        :param bool force: force delete
+        :return: True
+        :rtype: bool
+        """
+        return self.services.delete(service_id=self.id, force=force)
+
+    def status(self):
+        """
+        Get Service status
+
+        :return: status json
+        :rtype: dict
+        """
+        return self.services.status(service_id=self.id)
+
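A lifecycle sketch tying these together (illustrative; the concurrency tweak assumes the service carries a KubernetesRuntime-style runtime object, which is not shown in this diff):

.. code-block:: python

    # bump concurrency, push the change, then inspect the reported status
    service.runtime.concurrency = 5      # assumed runtime attribute
    service = service.update(force=True)
    print(service.status())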
+    def log(self,
+            size=None,
+            checkpoint=None,
+            start=None,
+            end=None,
+            follow=False,
+            text=None,
+            execution_id=None,
+            function_name=None,
+            replica_id=None,
+            system=False,
+            view=True,
+            until_completed=True,
+            model_id: str = None,
+            model_operation: str = None,
+            ):
+        """
+        Get service logs
+
+        :param int size: size
+        :param dict checkpoint: the information from the last point checked in the service
+        :param str start: iso format time
+        :param str end: iso format time
+        :param bool follow: if true, keep streaming future logs
+        :param str text: text
+        :param str execution_id: execution id
+        :param str function_name: function name
+        :param str replica_id: replica id
+        :param bool system: system
+        :param bool view: if true, print out all the logs
+        :param bool until_completed: wait until completed
+        :param str model_id: model id
+        :param str model_operation: model operation action
+        :return: ServiceLog entity
+        :rtype: ServiceLog
+
+        **Example**:
+
+        .. code-block:: python
+
+            service_log = service.log()
+        """
+        return self.services.log(service=self,
+                                 size=size,
+                                 checkpoint=checkpoint,
+                                 start=start,
+                                 end=end,
+                                 follow=follow,
+                                 execution_id=execution_id,
+                                 function_name=function_name,
+                                 replica_id=replica_id,
+                                 system=system,
+                                 text=text,
+                                 view=view,
+                                 until_completed=until_completed,
+                                 model_id=model_id,
+                                 model_operation=model_operation)
+
+    def open_in_web(self):
+        """
+        Open the service in web platform
+
+        :return:
+        """
+        parsed_url = urlsplit(self.platform_url)
+        base_url = parsed_url.scheme + "://" + parsed_url.netloc
+        url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
+        self._client_api._open_in_web(url=url)
+
+    def checkout(self):
+        """
+        Checkout the service (set it as the active service in the local state)
+
+        :return:
+        """
+        return self.services.checkout(service=self)
+
+    def pause(self):
+        """
+        Pause the service
+
+        :return:
+        """
+        return self.services.pause(service_id=self.id)
+
+    def resume(self):
+        """
+        Resume a paused service
+
+        :return:
+        """
+        return self.services.resume(service_id=self.id)
+
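A common maintenance pattern is to pause a service before changing it and resume it afterwards; an illustrative sketch:

.. code-block:: python

    service.pause()    # stop running new executions
    # ... apply configuration changes, e.g. service.update(force=True) ...
    service.resume()   # bring the service back up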
+    def execute(
+            self,
+            execution_input=None,
+            function_name=None,
+            resource=None,
+            item_id=None,
+            dataset_id=None,
+            annotation_id=None,
+            project_id=None,
+            sync=False,
+            stream_logs=True,
+            return_output=True
+    ):
+        """
+        Execute a function on an existing service
+
+        :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
+        :param str function_name: function name to run
+        :param str resource: input type
+        :param str item_id: optional - item id as input to function
+        :param str dataset_id: optional - dataset id as input to function
+        :param str annotation_id: optional - annotation id as input to function
+        :param str project_id: resource's project
+        :param bool sync: if true, wait for the function to finish
+        :param bool stream_logs: print the logs of the new execution; only works with sync=True
+        :param bool return_output: if True and sync is True - return the output directly
+        :return: execution object
+        :rtype: dtlpy.entities.execution.Execution
+
+        **Example**:
+
+        .. code-block:: python
+
+            execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
+        """
+        execution = self.executions.create(sync=sync,
+                                           execution_input=execution_input,
+                                           function_name=function_name,
+                                           resource=resource,
+                                           item_id=item_id,
+                                           dataset_id=dataset_id,
+                                           annotation_id=annotation_id,
+                                           stream_logs=stream_logs,
+                                           project_id=project_id,
+                                           return_output=return_output)
+        return execution
+
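For non-item inputs the payload can be passed explicitly; an illustrative sketch that reuses the dl.FunctionIO style from the execute_batch example below (the function name and input name are placeholders):

.. code-block:: python

    execution = service.execute(
        function_name='run',
        execution_input=[dl.FunctionIO(type=dl.PackageInputType.JSON,
                                       value={'threshold': 0.5},
                                       name='config')],
        project_id=service.project_id,
        sync=True)               # wait for completion and stream the logs
    print(execution.output)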
+    def execute_batch(self,
+                      filters,
+                      function_name: str = None,
+                      execution_inputs: list = None,
+                      wait=True
+                      ):
+        """
+        Execute a function on an existing service for every entity matching the given filters
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
+
+        :param filters: Filters entity for filtering before the execution
+        :param str function_name: function name to run
+        :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities that represent the extra inputs of the function
+        :param bool wait: wait until the batch command finishes
+        :return: execution object
+        :rtype: dtlpy.entities.execution.Execution
+
+        **Example**:
+
+        .. code-block:: python
+
+            command = service.execute_batch(
+                execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
+                filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]}),
+                function_name='run')
+        """
+        execution = self.executions.create_batch(service_id=self.id,
+                                                 execution_inputs=execution_inputs,
+                                                 filters=filters,
+                                                 function_name=function_name,
+                                                 wait=wait)
+        return execution
+
+    def rerun_batch(self,
+                    filters,
+                    wait=True
+                    ):
+        """
+        Rerun executions on an existing service
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
+
+        :param filters: Filters entity for filtering before the rerun
+        :param bool wait: wait until the rerun command finishes
+        :return: rerun command
+        :rtype: dtlpy.entities.command.Command
+
+        **Example**:
+
+        .. code-block:: python
+
+            command = service.executions.rerun_batch(
+                filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
+        """
+        execution = self.executions.rerun_batch(service_id=self.id,
+                                                filters=filters,
+                                                wait=wait)
+        return execution
+
+    def activate_slots(
+            self,
+            project_id: str = None,
+            task_id: str = None,
+            dataset_id: str = None,
+            org_id: str = None,
+            user_email: str = None,
+            slots=None,
+            role=None,
+            prevent_override: bool = True,
+            visible: bool = True,
+            icon: str = 'fas fa-magic',
+            **kwargs
+    ) -> object:
+        """
+        Activate service slots
+
+        :param str project_id: project id
+        :param str task_id: task id
+        :param str dataset_id: dataset id
+        :param str org_id: org id
+        :param str user_email: user email
+        :param list slots: list of entities.PackageSlot
+        :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.OWNER, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
+        :param bool prevent_override: True to prevent override
+        :param bool visible: visible
+        :param str icon: icon
+        :param kwargs: all additional arguments
+        :return: list of user settings for the activated slots
+        :rtype: list
+
+        **Example**:
+
+        .. code-block:: python
+
+            setting = service.activate_slots(project_id='project_id',
+                                             slots=List[entities.PackageSlot],
+                                             icon='fas fa-magic')
+        """
+        return self.services.activate_slots(
+            service=self,
+            project_id=project_id,
+            task_id=task_id,
+            dataset_id=dataset_id,
+            org_id=org_id,
+            user_email=user_email,
+            slots=slots,
+            role=role,
+            prevent_override=prevent_override,
+            visible=visible,
+            icon=icon,
+            **kwargs
+        )
+
+    def restart(self, replica_name: str = None):
+        """
+        Restart service
+
+        :param str replica_name: optional - restart only the specified replica
+        :return: True
+        :rtype: bool
+        """
+        return self.services.restart(service=self, replica_name=replica_name)
+
+
+class KubernetesAutoscalerType(str, Enum):
+    """ The Service Autoscaler Type (RABBITMQ, CPU, RPS).
+
+    .. list-table::
+       :widths: 15 150
+       :header-rows: 1
+
+       * - Type
+         - Description
+       * - RABBITMQ
+         - Service Autoscaler based on service queue length
+       * - CPU
+         - Service Autoscaler based on service CPU usage
+       * - RPS
+         - Service Autoscaler based on service RPS
+    """
+    RABBITMQ = 'rabbitmq'
+    CPU = 'cpu'
+    RPS = 'rps'
+
+
+# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
+class KubernetesAutuscalerTypeMeta(type):
+    def __getattribute__(cls, item):
+        if hasattr(KubernetesAutoscalerType, item):
+            return getattr(KubernetesAutoscalerType, item)
+        else:
+            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
+
+
+class KubernetesAutoscaler(entities.BaseEntity):
+    MIN_REPLICA_DEFAULT = 0
+    MAX_REPLICA_DEFAULT = 1
+    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
+
+    def __init__(self,
+                 autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
+                 min_replicas=MIN_REPLICA_DEFAULT,
+                 max_replicas=MAX_REPLICA_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        self.autoscaler_type = kwargs.get('type', autoscaler_type)
+        self.min_replicas = kwargs.get('minReplicas', min_replicas)
+        self.max_replicas = kwargs.get('maxReplicas', max_replicas)
+        self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
+        self.polling_interval = kwargs.get('pollingInterval', polling_interval)
+
+    def to_json(self):
+        _json = {
+            'type': self.autoscaler_type,
+            'minReplicas': self.min_replicas,
+            'maxReplicas': self.max_replicas
+        }
+
+        if self.cooldown_period is not None:
+            _json['cooldownPeriod'] = self.cooldown_period
+
+        if self.polling_interval is not None:
+            _json['pollingInterval'] = self.polling_interval
+
+        return _json
+
+
+class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
+    QUEUE_LENGTH_DEFAULT = 1000
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 queue_length=QUEUE_LENGTH_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.queue_length = kwargs.get('queueLength', queue_length)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['queueLength'] = self.queue_length
+        return _json
+
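A sketch of wiring a queue-based autoscaler into a service deployment (illustrative; it assumes the service runtime is the usual dl.KubernetesRuntime object with an autoscaler field, which is not part of this diff):

.. code-block:: python

    autoscaler = dl.KubernetesRabbitmqAutoscaler(min_replicas=0,
                                                 max_replicas=2,
                                                 queue_length=10)
    service.runtime.autoscaler = autoscaler   # assumed KubernetesRuntime attribute
    service = service.update(force=True)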
+
+class KubernetesRPSAutoscaler(KubernetesAutoscaler):
+    THRESHOLD_DEFAULT = 10
+    RATE_SECONDS_DEFAULT = 30
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 threshold=THRESHOLD_DEFAULT,
+                 rate_seconds=RATE_SECONDS_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RPS,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.threshold = kwargs.get('threshold', threshold)
+        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['rateSeconds'] = self.rate_seconds
+        _json['threshold'] = self.threshold
+        return _json
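For completeness, the RPS variant serializes the same base fields plus its rate settings; a quick illustrative check built only from the classes defined above:

.. code-block:: python

    rps = KubernetesRPSAutoscaler(min_replicas=1, max_replicas=3, threshold=20, rate_seconds=60)
    payload = rps.to_json()
    assert payload['minReplicas'] == 1 and payload['maxReplicas'] == 3
    assert payload['threshold'] == 20 and payload['rateSeconds'] == 60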