dtlpy 1.115.44__py3-none-any.whl → 1.116.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +959 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +963 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1257 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1504 -1504
- dtlpy/repositories/downloader.py +976 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +419 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1785 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
- dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
- dtlpy-1.116.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/service.py
CHANGED
|
@@ -1,963 +1,963 @@
|
|
|
1
|
-
import warnings
|
|
2
|
-
from collections import namedtuple
|
|
3
|
-
from enum import Enum
|
|
4
|
-
import traceback
|
|
5
|
-
import logging
|
|
6
|
-
from typing import List
|
|
7
|
-
from urllib.parse import urlsplit
|
|
8
|
-
import attr
|
|
9
|
-
from .. import repositories, entities
|
|
10
|
-
from ..services.api_client import ApiClient
|
|
11
|
-
|
|
12
|
-
logger = logging.getLogger(name='dtlpy')
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
class ServiceType(str, Enum):
|
|
16
|
-
""" The type of the service (SYSTEM).
|
|
17
|
-
|
|
18
|
-
.. list-table::
|
|
19
|
-
:widths: 15 150
|
|
20
|
-
:header-rows: 1
|
|
21
|
-
|
|
22
|
-
* - State
|
|
23
|
-
- Description
|
|
24
|
-
* - SYSTEM
|
|
25
|
-
- Dataloop internal service
|
|
26
|
-
"""
|
|
27
|
-
SYSTEM = 'system'
|
|
28
|
-
REGULAR = 'regular'
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
class ServiceModeType(str, Enum):
|
|
32
|
-
""" The type of the service mode.
|
|
33
|
-
|
|
34
|
-
.. list-table::
|
|
35
|
-
:widths: 15 150
|
|
36
|
-
:header-rows: 1
|
|
37
|
-
|
|
38
|
-
* - State
|
|
39
|
-
- Description
|
|
40
|
-
* - REGULAR
|
|
41
|
-
- Service regular mode type
|
|
42
|
-
* - DEBUG
|
|
43
|
-
- Service debug mode type
|
|
44
|
-
"""
|
|
45
|
-
REGULAR = 'regular'
|
|
46
|
-
DEBUG = 'debug'
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
class OnResetAction(str, Enum):
|
|
50
|
-
""" The Execution action when the service reset (RERUN, FAILED).
|
|
51
|
-
|
|
52
|
-
.. list-table::
|
|
53
|
-
:widths: 15 150
|
|
54
|
-
:header-rows: 1
|
|
55
|
-
|
|
56
|
-
* - State
|
|
57
|
-
- Description
|
|
58
|
-
* - RERUN
|
|
59
|
-
- When the service resting rerun the execution
|
|
60
|
-
* - FAILED
|
|
61
|
-
- When the service resting fail the execution
|
|
62
|
-
"""
|
|
63
|
-
RERUN = 'rerun'
|
|
64
|
-
FAILED = 'failed'
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
class InstanceCatalog(str, Enum):
|
|
68
|
-
""" The Service Pode size.
|
|
69
|
-
|
|
70
|
-
.. list-table::
|
|
71
|
-
:widths: 15 150
|
|
72
|
-
:header-rows: 1
|
|
73
|
-
|
|
74
|
-
* - State
|
|
75
|
-
- Description
|
|
76
|
-
* - REGULAR_XS
|
|
77
|
-
- regular pod with extra small size
|
|
78
|
-
* - REGULAR_S
|
|
79
|
-
- regular pod with small size
|
|
80
|
-
* - REGULAR_M
|
|
81
|
-
- regular pod with medium size
|
|
82
|
-
* - REGULAR_L
|
|
83
|
-
- regular pod with large size
|
|
84
|
-
* - HIGHMEM_XS
|
|
85
|
-
- highmem pod with extra small size
|
|
86
|
-
* - HIGHMEM_S
|
|
87
|
-
- highmem pod with small size
|
|
88
|
-
* - HIGHMEM_M
|
|
89
|
-
- highmem pod with medium size
|
|
90
|
-
* - HIGHMEM_L
|
|
91
|
-
- highmem pod with large size
|
|
92
|
-
* - GPU_T4_S
|
|
93
|
-
- GPU NVIDIA T4 pod with regular memory
|
|
94
|
-
* - GPU_T4_M
|
|
95
|
-
- GPU NVIDIA T4 pod with highmem
|
|
96
|
-
"""
|
|
97
|
-
REGULAR_XS = "regular-xs"
|
|
98
|
-
REGULAR_S = "regular-s"
|
|
99
|
-
REGULAR_M = "regular-m"
|
|
100
|
-
REGULAR_L = "regular-l"
|
|
101
|
-
HIGHMEM_XS = "highmem-xs"
|
|
102
|
-
HIGHMEM_S = "highmem-s"
|
|
103
|
-
HIGHMEM_M = "highmem-m"
|
|
104
|
-
HIGHMEM_L = "highmem-l"
|
|
105
|
-
GPU_T4_S = "gpu-t4"
|
|
106
|
-
GPU_T4_M = "gpu-t4-m"
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
class RuntimeType(str, Enum):
|
|
110
|
-
""" Service culture Runtime (KUBERNETES).
|
|
111
|
-
|
|
112
|
-
.. list-table::
|
|
113
|
-
:widths: 15 150
|
|
114
|
-
:header-rows: 1
|
|
115
|
-
|
|
116
|
-
* - State
|
|
117
|
-
- Description
|
|
118
|
-
* - KUBERNETES
|
|
119
|
-
- Service run in kubernetes culture
|
|
120
|
-
"""
|
|
121
|
-
KUBERNETES = 'kubernetes'
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
class ServiceRuntime(entities.BaseEntity):
|
|
125
|
-
def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
|
|
126
|
-
self.service_type = service_type
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
class KubernetesRuntime(ServiceRuntime):
|
|
130
|
-
DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
|
|
131
|
-
DEFAULT_NUM_REPLICAS = 1
|
|
132
|
-
DEFAULT_CONCURRENCY = 10
|
|
133
|
-
|
|
134
|
-
def __init__(self,
|
|
135
|
-
pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
|
|
136
|
-
num_replicas=DEFAULT_NUM_REPLICAS,
|
|
137
|
-
concurrency=DEFAULT_CONCURRENCY,
|
|
138
|
-
dynamic_concurrency=None,
|
|
139
|
-
runner_image=None,
|
|
140
|
-
autoscaler=None,
|
|
141
|
-
**kwargs):
|
|
142
|
-
|
|
143
|
-
super().__init__(service_type=RuntimeType.KUBERNETES)
|
|
144
|
-
self.pod_type = kwargs.get('podType', pod_type)
|
|
145
|
-
self.num_replicas = kwargs.get('numReplicas', num_replicas)
|
|
146
|
-
self.concurrency = kwargs.get('concurrency', concurrency)
|
|
147
|
-
self.runner_image = kwargs.get('runnerImage', runner_image)
|
|
148
|
-
self._proxy_image = kwargs.get('proxyImage', None)
|
|
149
|
-
self.single_agent = kwargs.get('singleAgent', None)
|
|
150
|
-
self.preemptible = kwargs.get('preemptible', None)
|
|
151
|
-
self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
|
|
152
|
-
|
|
153
|
-
self.autoscaler = kwargs.get('autoscaler', autoscaler)
|
|
154
|
-
if self.autoscaler is not None and isinstance(self.autoscaler, dict):
|
|
155
|
-
if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
|
|
156
|
-
self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
|
|
157
|
-
elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
|
|
158
|
-
self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
|
|
159
|
-
else:
|
|
160
|
-
raise NotImplementedError(
|
|
161
|
-
'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
|
|
162
|
-
|
|
163
|
-
def to_json(self):
|
|
164
|
-
_json = {
|
|
165
|
-
'podType': self.pod_type,
|
|
166
|
-
'numReplicas': self.num_replicas,
|
|
167
|
-
'concurrency': self.concurrency,
|
|
168
|
-
'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
|
|
169
|
-
}
|
|
170
|
-
|
|
171
|
-
if self.single_agent is not None:
|
|
172
|
-
_json['singleAgent'] = self.single_agent
|
|
173
|
-
|
|
174
|
-
if self.runner_image is not None:
|
|
175
|
-
_json['runnerImage'] = self.runner_image
|
|
176
|
-
|
|
177
|
-
if self._proxy_image is not None:
|
|
178
|
-
_json['proxyImage'] = self._proxy_image
|
|
179
|
-
|
|
180
|
-
if self.preemptible is not None:
|
|
181
|
-
_json['preemptible'] = self.preemptible
|
|
182
|
-
|
|
183
|
-
if self.dynamic_concurrency is not None:
|
|
184
|
-
_json['dynamicConcurrency'] = self.dynamic_concurrency
|
|
185
|
-
|
|
186
|
-
return _json
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
@attr.s
|
|
190
|
-
class Service(entities.BaseEntity):
|
|
191
|
-
"""
|
|
192
|
-
Service object
|
|
193
|
-
"""
|
|
194
|
-
# platform
|
|
195
|
-
created_at = attr.ib()
|
|
196
|
-
updated_at = attr.ib(repr=False)
|
|
197
|
-
creator = attr.ib()
|
|
198
|
-
version = attr.ib()
|
|
199
|
-
|
|
200
|
-
package_id = attr.ib()
|
|
201
|
-
package_revision = attr.ib()
|
|
202
|
-
|
|
203
|
-
bot = attr.ib()
|
|
204
|
-
use_user_jwt = attr.ib(repr=False)
|
|
205
|
-
init_input = attr.ib()
|
|
206
|
-
versions = attr.ib(repr=False)
|
|
207
|
-
module_name = attr.ib()
|
|
208
|
-
name = attr.ib()
|
|
209
|
-
url = attr.ib()
|
|
210
|
-
id = attr.ib()
|
|
211
|
-
active = attr.ib()
|
|
212
|
-
driver_id = attr.ib(repr=False)
|
|
213
|
-
secrets = attr.ib(repr=False)
|
|
214
|
-
|
|
215
|
-
# name change
|
|
216
|
-
runtime = attr.ib(repr=False, type=KubernetesRuntime)
|
|
217
|
-
queue_length_limit = attr.ib()
|
|
218
|
-
run_execution_as_process = attr.ib(type=bool)
|
|
219
|
-
execution_timeout = attr.ib()
|
|
220
|
-
drain_time = attr.ib()
|
|
221
|
-
on_reset = attr.ib(type=OnResetAction)
|
|
222
|
-
_type = attr.ib(type=ServiceType)
|
|
223
|
-
project_id = attr.ib()
|
|
224
|
-
org_id = attr.ib()
|
|
225
|
-
is_global = attr.ib()
|
|
226
|
-
max_attempts = attr.ib()
|
|
227
|
-
mode = attr.ib(repr=False)
|
|
228
|
-
metadata = attr.ib()
|
|
229
|
-
archive = attr.ib(repr=False)
|
|
230
|
-
config = attr.ib(repr=False)
|
|
231
|
-
settings = attr.ib(repr=False)
|
|
232
|
-
panels = attr.ib(repr=False)
|
|
233
|
-
|
|
234
|
-
# SDK
|
|
235
|
-
_package = attr.ib(repr=False)
|
|
236
|
-
_client_api = attr.ib(type=ApiClient, repr=False)
|
|
237
|
-
_revisions = attr.ib(default=None, repr=False)
|
|
238
|
-
# repositories
|
|
239
|
-
_project = attr.ib(default=None, repr=False)
|
|
240
|
-
_repositories = attr.ib(repr=False)
|
|
241
|
-
updated_by = attr.ib(default=None)
|
|
242
|
-
app = attr.ib(default=None)
|
|
243
|
-
integrations = attr.ib(default=None)
|
|
244
|
-
|
|
245
|
-
@property
|
|
246
|
-
def createdAt(self):
|
|
247
|
-
return self.created_at
|
|
248
|
-
|
|
249
|
-
@property
|
|
250
|
-
def updatedAt(self):
|
|
251
|
-
return self.updated_at
|
|
252
|
-
|
|
253
|
-
@staticmethod
|
|
254
|
-
def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
|
|
255
|
-
"""
|
|
256
|
-
Same as from_json but with try-except to catch if error
|
|
257
|
-
|
|
258
|
-
:param _json: platform json
|
|
259
|
-
:param client_api: ApiClient entity
|
|
260
|
-
:param package:
|
|
261
|
-
:param project: project entity
|
|
262
|
-
:param is_fetched: is Entity fetched from Platform
|
|
263
|
-
:return:
|
|
264
|
-
"""
|
|
265
|
-
try:
|
|
266
|
-
service = Service.from_json(_json=_json,
|
|
267
|
-
client_api=client_api,
|
|
268
|
-
package=package,
|
|
269
|
-
project=project,
|
|
270
|
-
is_fetched=is_fetched)
|
|
271
|
-
status = True
|
|
272
|
-
except Exception:
|
|
273
|
-
service = traceback.format_exc()
|
|
274
|
-
status = False
|
|
275
|
-
return status, service
|
|
276
|
-
|
|
277
|
-
@classmethod
|
|
278
|
-
def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
|
|
279
|
-
"""
|
|
280
|
-
Build a service entity object from a json
|
|
281
|
-
|
|
282
|
-
:param dict _json: platform json
|
|
283
|
-
:param dl.ApiClient client_api: ApiClient entity
|
|
284
|
-
:param dtlpy.entities.package.Package package: package entity
|
|
285
|
-
:param dtlpy.entities.project.Project project: project entity
|
|
286
|
-
:param bool is_fetched: is Entity fetched from Platform
|
|
287
|
-
:return: service object
|
|
288
|
-
:rtype: dtlpy.entities.service.Service
|
|
289
|
-
"""
|
|
290
|
-
if project is not None:
|
|
291
|
-
if project.id != _json.get('projectId', None):
|
|
292
|
-
logger.warning('Service has been fetched from a project that is not belong to it')
|
|
293
|
-
project = None
|
|
294
|
-
|
|
295
|
-
if package is not None:
|
|
296
|
-
if package.id != _json.get('packageId', None):
|
|
297
|
-
logger.warning('Service has been fetched from a package that is not belong to it')
|
|
298
|
-
package = None
|
|
299
|
-
|
|
300
|
-
versions = _json.get('versions', dict())
|
|
301
|
-
runtime = _json.get("runtime", None)
|
|
302
|
-
if runtime:
|
|
303
|
-
runtime = KubernetesRuntime(**runtime)
|
|
304
|
-
|
|
305
|
-
inst = cls(
|
|
306
|
-
package_revision=_json.get("packageRevision", None),
|
|
307
|
-
bot=_json.get("botUserName", None),
|
|
308
|
-
use_user_jwt=_json.get("useUserJwt", False),
|
|
309
|
-
created_at=_json.get("createdAt", None),
|
|
310
|
-
updated_at=_json.get("updatedAt", None),
|
|
311
|
-
project_id=_json.get('projectId', None),
|
|
312
|
-
package_id=_json.get('packageId', None),
|
|
313
|
-
driver_id=_json.get('driverId', None),
|
|
314
|
-
max_attempts=_json.get('maxAttempts', None),
|
|
315
|
-
version=_json.get('version', None),
|
|
316
|
-
creator=_json.get('creator', None),
|
|
317
|
-
revisions=_json.get('revisions', None),
|
|
318
|
-
queue_length_limit=_json.get('queueLengthLimit', None),
|
|
319
|
-
active=_json.get('active', None),
|
|
320
|
-
runtime=runtime,
|
|
321
|
-
is_global=_json.get("global", False),
|
|
322
|
-
init_input=_json.get("initParams", dict()),
|
|
323
|
-
module_name=_json.get("moduleName", None),
|
|
324
|
-
run_execution_as_process=_json.get('runExecutionAsProcess', False),
|
|
325
|
-
execution_timeout=_json.get('executionTimeout', 60 * 60),
|
|
326
|
-
drain_time=_json.get('drainTime', 60 * 10),
|
|
327
|
-
on_reset=_json.get('onReset', OnResetAction.FAILED),
|
|
328
|
-
name=_json.get("name", None),
|
|
329
|
-
url=_json.get("url", None),
|
|
330
|
-
id=_json.get("id", None),
|
|
331
|
-
versions=versions,
|
|
332
|
-
client_api=client_api,
|
|
333
|
-
package=package,
|
|
334
|
-
project=project,
|
|
335
|
-
secrets=_json.get("secrets", None),
|
|
336
|
-
type=_json.get("type", None),
|
|
337
|
-
mode=_json.get('mode', dict()),
|
|
338
|
-
metadata=_json.get('metadata', None),
|
|
339
|
-
archive=_json.get('archive', None),
|
|
340
|
-
updated_by=_json.get('updatedBy', None),
|
|
341
|
-
config=_json.get('config', None),
|
|
342
|
-
settings=_json.get('settings', None),
|
|
343
|
-
app=_json.get('app', None),
|
|
344
|
-
integrations=_json.get('integrations', None),
|
|
345
|
-
org_id=_json.get('orgId', None),
|
|
346
|
-
panels=_json.get('panels', None)
|
|
347
|
-
)
|
|
348
|
-
inst.is_fetched = is_fetched
|
|
349
|
-
return inst
|
|
350
|
-
|
|
351
|
-
############
|
|
352
|
-
# Entities #
|
|
353
|
-
############
|
|
354
|
-
@property
|
|
355
|
-
def revisions(self):
|
|
356
|
-
if self._revisions is None:
|
|
357
|
-
self._revisions = self.services.revisions(service=self)
|
|
358
|
-
return self._revisions
|
|
359
|
-
|
|
360
|
-
@property
|
|
361
|
-
def platform_url(self):
|
|
362
|
-
return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
|
|
363
|
-
|
|
364
|
-
@property
|
|
365
|
-
def project(self):
|
|
366
|
-
if self._project is None:
|
|
367
|
-
self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
|
|
368
|
-
fetch=None)
|
|
369
|
-
assert isinstance(self._project, entities.Project)
|
|
370
|
-
return self._project
|
|
371
|
-
|
|
372
|
-
@property
|
|
373
|
-
def package(self):
|
|
374
|
-
if self._package is None:
|
|
375
|
-
try:
|
|
376
|
-
dpk_id = None
|
|
377
|
-
dpk_version = None
|
|
378
|
-
if self.app and isinstance(self.app, dict):
|
|
379
|
-
dpk_id = self.app.get('dpkId', None)
|
|
380
|
-
dpk_version = self.app.get('dpkVersion', None)
|
|
381
|
-
if dpk_id is None:
|
|
382
|
-
self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
|
|
383
|
-
dpk_id=self.package_id)
|
|
384
|
-
else:
|
|
385
|
-
self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
|
|
386
|
-
dpk_id=dpk_id,
|
|
387
|
-
version=dpk_version)
|
|
388
|
-
|
|
389
|
-
assert isinstance(self._package, entities.Dpk)
|
|
390
|
-
except:
|
|
391
|
-
self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
|
|
392
|
-
fetch=None,
|
|
393
|
-
log_error=False)
|
|
394
|
-
assert isinstance(self._package, entities.Package)
|
|
395
|
-
return self._package
|
|
396
|
-
|
|
397
|
-
@property
|
|
398
|
-
def execution_url(self):
|
|
399
|
-
return 'CURL -X POST' \
|
|
400
|
-
'\nauthorization: Bearer <token>' \
|
|
401
|
-
'\nContent-Type: application/json" -d {' \
|
|
402
|
-
'\n"input": {<input json>}, ' \
|
|
403
|
-
'"projectId": "{<project_id>}", ' \
|
|
404
|
-
'"functionName": "<function_name>"}'
|
|
405
|
-
|
|
406
|
-
################
|
|
407
|
-
# repositories #
|
|
408
|
-
################
|
|
409
|
-
@_repositories.default
|
|
410
|
-
def set_repositories(self):
|
|
411
|
-
reps = namedtuple('repositories',
|
|
412
|
-
field_names=['executions', 'services', 'triggers'])
|
|
413
|
-
|
|
414
|
-
if self._package is None:
|
|
415
|
-
services_repo = repositories.Services(client_api=self._client_api,
|
|
416
|
-
package=self._package,
|
|
417
|
-
project=self._project)
|
|
418
|
-
else:
|
|
419
|
-
services_repo = self._package.services
|
|
420
|
-
|
|
421
|
-
triggers = repositories.Triggers(client_api=self._client_api,
|
|
422
|
-
project=self._project,
|
|
423
|
-
service=self)
|
|
424
|
-
|
|
425
|
-
r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
|
|
426
|
-
services=services_repo, triggers=triggers)
|
|
427
|
-
return r
|
|
428
|
-
|
|
429
|
-
@property
|
|
430
|
-
def executions(self):
|
|
431
|
-
assert isinstance(self._repositories.executions, repositories.Executions)
|
|
432
|
-
return self._repositories.executions
|
|
433
|
-
|
|
434
|
-
@property
|
|
435
|
-
def triggers(self):
|
|
436
|
-
assert isinstance(self._repositories.triggers, repositories.Triggers)
|
|
437
|
-
return self._repositories.triggers
|
|
438
|
-
|
|
439
|
-
@property
|
|
440
|
-
def services(self):
|
|
441
|
-
assert isinstance(self._repositories.services, repositories.Services)
|
|
442
|
-
return self._repositories.services
|
|
443
|
-
|
|
444
|
-
###########
|
|
445
|
-
# methods #
|
|
446
|
-
###########
|
|
447
|
-
def to_json(self):
|
|
448
|
-
"""
|
|
449
|
-
Returns platform _json format of object
|
|
450
|
-
|
|
451
|
-
:return: platform json format of object
|
|
452
|
-
:rtype: dict
|
|
453
|
-
"""
|
|
454
|
-
_json = attr.asdict(
|
|
455
|
-
self,
|
|
456
|
-
filter=attr.filters.exclude(
|
|
457
|
-
attr.fields(Service)._project,
|
|
458
|
-
attr.fields(Service)._package,
|
|
459
|
-
attr.fields(Service)._revisions,
|
|
460
|
-
attr.fields(Service)._client_api,
|
|
461
|
-
attr.fields(Service)._repositories,
|
|
462
|
-
attr.fields(Service).project_id,
|
|
463
|
-
attr.fields(Service).init_input,
|
|
464
|
-
attr.fields(Service).module_name,
|
|
465
|
-
attr.fields(Service).bot,
|
|
466
|
-
attr.fields(Service).package_id,
|
|
467
|
-
attr.fields(Service).is_global,
|
|
468
|
-
attr.fields(Service).use_user_jwt,
|
|
469
|
-
attr.fields(Service).package_revision,
|
|
470
|
-
attr.fields(Service).driver_id,
|
|
471
|
-
attr.fields(Service).run_execution_as_process,
|
|
472
|
-
attr.fields(Service).execution_timeout,
|
|
473
|
-
attr.fields(Service).drain_time,
|
|
474
|
-
attr.fields(Service).runtime,
|
|
475
|
-
attr.fields(Service).queue_length_limit,
|
|
476
|
-
attr.fields(Service).max_attempts,
|
|
477
|
-
attr.fields(Service).on_reset,
|
|
478
|
-
attr.fields(Service).created_at,
|
|
479
|
-
attr.fields(Service).updated_at,
|
|
480
|
-
attr.fields(Service).secrets,
|
|
481
|
-
attr.fields(Service)._type,
|
|
482
|
-
attr.fields(Service).mode,
|
|
483
|
-
attr.fields(Service).metadata,
|
|
484
|
-
attr.fields(Service).archive,
|
|
485
|
-
attr.fields(Service).updated_by,
|
|
486
|
-
attr.fields(Service).config,
|
|
487
|
-
attr.fields(Service).settings,
|
|
488
|
-
attr.fields(Service).app,
|
|
489
|
-
attr.fields(Service).integrations,
|
|
490
|
-
attr.fields(Service).org_id,
|
|
491
|
-
attr.fields(Service).panels
|
|
492
|
-
)
|
|
493
|
-
)
|
|
494
|
-
|
|
495
|
-
_json['projectId'] = self.project_id
|
|
496
|
-
_json['orgId'] = self.org_id
|
|
497
|
-
_json['packageId'] = self.package_id
|
|
498
|
-
_json['initParams'] = self.init_input
|
|
499
|
-
_json['moduleName'] = self.module_name
|
|
500
|
-
_json['botUserName'] = self.bot
|
|
501
|
-
_json['useUserJwt'] = self.use_user_jwt
|
|
502
|
-
_json['global'] = self.is_global
|
|
503
|
-
_json['driverId'] = self.driver_id
|
|
504
|
-
_json['packageRevision'] = self.package_revision
|
|
505
|
-
_json['runExecutionAsProcess'] = self.run_execution_as_process
|
|
506
|
-
_json['executionTimeout'] = self.execution_timeout
|
|
507
|
-
_json['drainTime'] = self.drain_time
|
|
508
|
-
_json['onReset'] = self.on_reset
|
|
509
|
-
_json['createdAt'] = self.created_at
|
|
510
|
-
_json['updatedAt'] = self.updated_at
|
|
511
|
-
|
|
512
|
-
if self.updated_by is not None:
|
|
513
|
-
_json['updatedBy'] = self.updated_by
|
|
514
|
-
|
|
515
|
-
if self.panels is not None:
|
|
516
|
-
_json['panels'] = self.panels
|
|
517
|
-
|
|
518
|
-
if self.max_attempts is not None:
|
|
519
|
-
_json['maxAttempts'] = self.max_attempts
|
|
520
|
-
|
|
521
|
-
if self.is_global is not None:
|
|
522
|
-
_json['global'] = self.is_global
|
|
523
|
-
|
|
524
|
-
if self.runtime:
|
|
525
|
-
_json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
|
|
526
|
-
|
|
527
|
-
if self.queue_length_limit is not None:
|
|
528
|
-
_json['queueLengthLimit'] = self.queue_length_limit
|
|
529
|
-
|
|
530
|
-
if self.secrets is not None:
|
|
531
|
-
_json['secrets'] = self.secrets
|
|
532
|
-
|
|
533
|
-
if self._type is not None:
|
|
534
|
-
_json['type'] = self._type
|
|
535
|
-
|
|
536
|
-
if self.mode:
|
|
537
|
-
_json['mode'] = self.mode
|
|
538
|
-
|
|
539
|
-
if self.metadata:
|
|
540
|
-
_json['metadata'] = self.metadata
|
|
541
|
-
|
|
542
|
-
if self.archive is not None:
|
|
543
|
-
_json['archive'] = self.archive
|
|
544
|
-
|
|
545
|
-
if self.config is not None:
|
|
546
|
-
_json['config'] = self.config
|
|
547
|
-
|
|
548
|
-
if self.settings is not None:
|
|
549
|
-
_json['settings'] = self.settings
|
|
550
|
-
|
|
551
|
-
if self.app is not None:
|
|
552
|
-
_json['app'] = self.app
|
|
553
|
-
|
|
554
|
-
if self.integrations is not None:
|
|
555
|
-
_json['integrations'] = self.integrations
|
|
556
|
-
|
|
557
|
-
return _json
|
|
558
|
-
|
|
559
|
-
def update(self, force=False):
    """
    Push local changes of this Service to the platform.

    :param bool force: force the update
    :return: the updated Service entity
    :rtype: dtlpy.entities.service.Service
    """
    # Delegate to the services repository, which performs the API call.
    updated = self.services.update(service=self, force=force)
    return updated
|
|
568
|
-
|
|
569
|
-
def delete(self, force: bool = False):
    """
    Delete this Service object from the platform.

    :param bool force: force the deletion
    :return: True on success
    :rtype: bool
    """
    # The repository needs only the id, not the full entity.
    result = self.services.delete(service_id=self.id, force=force)
    return result
|
|
577
|
-
|
|
578
|
-
def status(self):
    """
    Fetch the current status of this Service.

    :return: status json
    :rtype: dict
    """
    service_id = self.id
    return self.services.status(service_id=service_id)
|
|
586
|
-
|
|
587
|
-
def log(self,
        size=None,
        checkpoint=None,
        start=None,
        end=None,
        follow=False,
        text=None,
        execution_id=None,
        function_name=None,
        replica_id=None,
        system=False,
        view=True,
        until_completed=True,
        model_id: str = None,
        model_operation: str = None,
        ):
    """
    Get service logs.

    :param int size: size
    :param dict checkpoint: the information from the last point checked in the service
    :param str start: iso format time
    :param str end: iso format time
    :param bool follow: if true, keep stream future logs
    :param str text: text
    :param str execution_id: execution id
    :param str function_name: function name
    :param str replica_id: replica id
    :param bool system: system
    :param bool view: if true, print out all the logs
    :param bool until_completed: wait until completed
    :param str model_id: model id
    :param str model_operation: model operation action
    :return: ServiceLog entity
    :rtype: ServiceLog

    **Example**:

    .. code-block:: python

        service_log = service.log()
    """
    # Pure pass-through to the services repository; all filtering/streaming
    # logic lives there.
    return self.services.log(service=self,
                             size=size,
                             checkpoint=checkpoint,
                             start=start,
                             end=end,
                             follow=follow,
                             execution_id=execution_id,
                             function_name=function_name,
                             replica_id=replica_id,
                             system=system,
                             text=text,
                             view=view,
                             until_completed=until_completed,
                             model_id=model_id,
                             model_operation=model_operation)
|
|
644
|
-
|
|
645
|
-
def open_in_web(self):
    """
    Open the service page in the web platform.

    :return:
    """
    # platform_url points at the service "main" page; keep only scheme + host.
    parts = urlsplit(self.platform_url)
    base = parts.scheme + "://" + parts.netloc
    target = '{}/projects/{}/services/{}'.format(base, self.project_id, self.id)
    self._client_api._open_in_web(url=target)
|
|
655
|
-
|
|
656
|
-
def checkout(self):
    """
    Checkout (switch the SDK state) to this service.

    :return:
    """
    result = self.services.checkout(service=self)
    return result
|
|
663
|
-
|
|
664
|
-
def pause(self):
    """
    Pause the service.

    :return:
    """
    service_id = self.id
    return self.services.pause(service_id=service_id)
|
|
671
|
-
|
|
672
|
-
def resume(self):
    """
    Resume a paused service.

    :return:
    """
    service_id = self.id
    return self.services.resume(service_id=service_id)
|
|
679
|
-
|
|
680
|
-
def execute(
        self,
        execution_input=None,
        function_name=None,
        resource=None,
        item_id=None,
        dataset_id=None,
        annotation_id=None,
        project_id=None,
        sync=False,
        stream_logs=True,
        return_output=True
):
    """
    Execute a function on an existing service.

    :param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
    :param str function_name: function name to run
    :param str resource: input type.
    :param str item_id: optional - item id as input to function
    :param str dataset_id: optional - dataset id as input to function
    :param str annotation_id: optional - annotation id as input to function
    :param str project_id: resource's project
    :param bool sync: if true, wait for function to end
    :param bool stream_logs: prints logs of the new execution. only works with sync=True
    :param bool return_output: if True and sync is True - will return the output directly
    :return: execution object
    :rtype: dtlpy.entities.execution.Execution

    **Example**:

    .. code-block:: python

        execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
    """
    # All creation logic (payload building, sync waiting, log streaming)
    # is implemented by the executions repository.
    execution = self.executions.create(sync=sync,
                                       execution_input=execution_input,
                                       function_name=function_name,
                                       resource=resource,
                                       item_id=item_id,
                                       dataset_id=dataset_id,
                                       annotation_id=annotation_id,
                                       stream_logs=stream_logs,
                                       project_id=project_id,
                                       return_output=return_output)
    return execution
|
|
726
|
-
|
|
727
|
-
def execute_batch(self,
                  filters,
                  function_name: str = None,
                  execution_inputs: list = None,
                  wait=True
                  ):
    """
    Execute a function on an existing service over a batch of filtered items.

    **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.

    :param filters: Filters entity for a filtering before execute
    :param str function_name: function name to run
    :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
    :param bool wait: wait until create task finish
    :return: execution object
    :rtype: dtlpy.entities.execution.Execution

    **Example**:

    .. code-block:: python

        command = service.execute_batch(
            execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
            filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]}),
            function_name='run')
    """
    # Batch creation is delegated to the executions repository.
    execution = self.executions.create_batch(service_id=self.id,
                                             execution_inputs=execution_inputs,
                                             filters=filters,
                                             function_name=function_name,
                                             wait=wait)
    return execution
|
|
760
|
-
|
|
761
|
-
def rerun_batch(self,
                filters,
                wait=True
                ):
    """
    Rerun executions on an existing service.

    **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.

    :param filters: Filters entity for a filtering before rerun
    :param bool wait: wait until create task finish
    :return: rerun command
    :rtype: dtlpy.entities.command.Command

    **Example**:

    .. code-block:: python

        command = service.executions.rerun_batch(
            filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
    """
    # Delegated to the executions repository; returns a Command tracking the rerun.
    execution = self.executions.rerun_batch(service_id=self.id,
                                            filters=filters,
                                            wait=wait)
    return execution
|
|
786
|
-
|
|
787
|
-
def activate_slots(
        self,
        project_id: str = None,
        task_id: str = None,
        dataset_id: str = None,
        org_id: str = None,
        user_email: str = None,
        slots=None,
        role=None,
        prevent_override: bool = True,
        visible: bool = True,
        icon: str = 'fas fa-magic',
        **kwargs
) -> object:
    """
    Activate service slots.

    :param str project_id: project id
    :param str task_id: task id
    :param str dataset_id: dataset id
    :param str org_id: org id
    :param str user_email: user email
    :param list slots: list of entities.PackageSlot
    :param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.owner, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
    :param bool prevent_override: True to prevent override
    :param bool visible: visible
    :param str icon: icon
    :param kwargs: all additional arguments
    :return: list of user setting for activated slots
    :rtype: list

    **Example**:

    .. code-block:: python

        setting = service.activate_slots(project_id='project_id',
                                         slots=List[entities.PackageSlot],
                                         icon='fas fa-magic')
    """
    # Pure pass-through; slot-activation logic lives in the services repository.
    return self.services.activate_slots(
        service=self,
        project_id=project_id,
        task_id=task_id,
        dataset_id=dataset_id,
        org_id=org_id,
        user_email=user_email,
        slots=slots,
        role=role,
        prevent_override=prevent_override,
        visible=visible,
        icon=icon,
        **kwargs
    )
|
|
840
|
-
|
|
841
|
-
def restart(self, replica_name: str = None):
    """
    Restart the service, or a single replica of it.

    :param str replica_name: restart only this replica when given
    :return: True
    :rtype: bool
    """
    result = self.services.restart(service=self, replica_name=replica_name)
    return result
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
class KubernetesAutoscalerType(str, Enum):
    """ The Service Autoscaler Type (RABBITMQ, CPU, RPS).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - RABBITMQ
         - Service Autoscaler based on service queue length
       * - CPU
         - Service Autoscaler based on service CPU usage
       * - RPS
         - Service Autoscaler based on service RPS
    """
    RABBITMQ = 'rabbitmq'
    CPU = 'cpu'
    RPS = 'rps'
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
class KubernetesAutuscalerTypeMeta(type):
    # Delegates every attribute access to the correctly-spelled
    # KubernetesAutoscalerType enum (backward-compatibility shim).
    def __getattribute__(cls, item):
        try:
            return getattr(KubernetesAutoscalerType, item)
        except AttributeError:
            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
class KubernetesAutoscaler(entities.BaseEntity):
    """Base Kubernetes autoscaler configuration for a service."""
    MIN_REPLICA_DEFAULT = 0
    MAX_REPLICA_DEFAULT = 1
    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ

    def __init__(self,
                 # annotation fixed: was the enum *member* KubernetesAutoscalerType.RABBITMQ,
                 # which is a value, not a type
                 autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
                 min_replicas=MIN_REPLICA_DEFAULT,
                 max_replicas=MAX_REPLICA_DEFAULT,
                 cooldown_period=None,
                 polling_interval=None,
                 **kwargs):
        """
        :param KubernetesAutoscalerType autoscaler_type: autoscaler kind (rabbitmq/cpu/rps)
        :param int min_replicas: minimum number of replicas
        :param int max_replicas: maximum number of replicas
        :param cooldown_period: optional cooldown period
        :param polling_interval: optional polling interval
        :param kwargs: platform camelCase keys (type/minReplicas/...) take
            precedence over the snake_case parameters, so the class can be
            constructed directly from platform json.
        """
        self.autoscaler_type = kwargs.get('type', autoscaler_type)
        self.min_replicas = kwargs.get('minReplicas', min_replicas)
        self.max_replicas = kwargs.get('maxReplicas', max_replicas)
        self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
        self.polling_interval = kwargs.get('pollingInterval', polling_interval)

    def to_json(self):
        """Return the platform json; optional fields are emitted only when set."""
        _json = {
            'type': self.autoscaler_type,
            'minReplicas': self.min_replicas,
            'maxReplicas': self.max_replicas
        }

        if self.cooldown_period is not None:
            _json['cooldownPeriod'] = self.cooldown_period

        if self.polling_interval is not None:
            _json['pollingInterval'] = self.polling_interval

        return _json
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
    """Autoscaler driven by the service's RabbitMQ queue length."""
    QUEUE_LENGTH_DEFAULT = 1000

    def __init__(self,
                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
                 queue_length=QUEUE_LENGTH_DEFAULT,
                 cooldown_period=None,
                 polling_interval=None,
                 **kwargs):
        super().__init__(min_replicas=min_replicas,
                         max_replicas=max_replicas,
                         autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
                         cooldown_period=cooldown_period,
                         polling_interval=polling_interval, **kwargs)
        # Platform camelCase key 'queueLength' wins over the snake_case parameter.
        self.queue_length = kwargs.get('queueLength', queue_length)

    def to_json(self):
        """Return the base json with the queue-length field appended."""
        payload = super().to_json()
        payload['queueLength'] = self.queue_length
        return payload
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
class KubernetesRPSAutoscaler(KubernetesAutoscaler):
    """Autoscaler driven by the service's requests-per-second rate."""
    THRESHOLD_DEFAULT = 10
    RATE_SECONDS_DEFAULT = 30

    def __init__(self,
                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
                 threshold=THRESHOLD_DEFAULT,
                 rate_seconds=RATE_SECONDS_DEFAULT,
                 cooldown_period=None,
                 polling_interval=None,
                 **kwargs):
        super().__init__(min_replicas=min_replicas,
                         max_replicas=max_replicas,
                         autoscaler_type=KubernetesAutoscalerType.RPS,
                         cooldown_period=cooldown_period,
                         polling_interval=polling_interval, **kwargs)
        # Platform camelCase keys win over the snake_case parameters.
        self.threshold = kwargs.get('threshold', threshold)
        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)

    def to_json(self):
        """Return the base json with the RPS-specific fields appended."""
        payload = super().to_json()
        payload['rateSeconds'] = self.rate_seconds
        payload['threshold'] = self.threshold
        return payload
|
|
1
|
+
import warnings
|
|
2
|
+
from collections import namedtuple
|
|
3
|
+
from enum import Enum
|
|
4
|
+
import traceback
|
|
5
|
+
import logging
|
|
6
|
+
from typing import List
|
|
7
|
+
from urllib.parse import urlsplit
|
|
8
|
+
import attr
|
|
9
|
+
from .. import repositories, entities
|
|
10
|
+
from ..services.api_client import ApiClient
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(name='dtlpy')
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ServiceType(str, Enum):
    """ The type of the service (SYSTEM, REGULAR).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - SYSTEM
         - Dataloop internal service
       * - REGULAR
         - Regular (non-system) service
    """
    SYSTEM = 'system'
    REGULAR = 'regular'
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class ServiceModeType(str, Enum):
    """ The type of the service mode (REGULAR, DEBUG).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - REGULAR
         - Service regular mode type
       * - DEBUG
         - Service debug mode type
    """
    REGULAR = 'regular'
    DEBUG = 'debug'
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class OnResetAction(str, Enum):
    """ The Execution action applied when the service resets (RERUN, FAILED).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - RERUN
         - When the service resets, rerun the execution
       * - FAILED
         - When the service resets, fail the execution
    """
    RERUN = 'rerun'
    FAILED = 'failed'
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class InstanceCatalog(str, Enum):
    """ The Service Pod size.

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - REGULAR_XS
         - regular pod with extra small size
       * - REGULAR_S
         - regular pod with small size
       * - REGULAR_M
         - regular pod with medium size
       * - REGULAR_L
         - regular pod with large size
       * - HIGHMEM_XS
         - highmem pod with extra small size
       * - HIGHMEM_S
         - highmem pod with small size
       * - HIGHMEM_M
         - highmem pod with medium size
       * - HIGHMEM_L
         - highmem pod with large size
       * - GPU_T4_S
         - GPU NVIDIA T4 pod with regular memory
       * - GPU_T4_M
         - GPU NVIDIA T4 pod with highmem
    """
    REGULAR_XS = "regular-xs"
    REGULAR_S = "regular-s"
    REGULAR_M = "regular-m"
    REGULAR_L = "regular-l"
    HIGHMEM_XS = "highmem-xs"
    HIGHMEM_S = "highmem-s"
    HIGHMEM_M = "highmem-m"
    HIGHMEM_L = "highmem-l"
    # NOTE: GPU_T4_S maps to the bare "gpu-t4" value (no "-s" suffix) —
    # this matches the platform catalog name.
    GPU_T4_S = "gpu-t4"
    GPU_T4_M = "gpu-t4-m"
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class RuntimeType(str, Enum):
    """ Service execution runtime (KUBERNETES).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - KUBERNETES
         - Service runs on a Kubernetes cluster
    """
    KUBERNETES = 'kubernetes'
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class ServiceRuntime(entities.BaseEntity):
    # Base runtime descriptor; concrete runtimes (e.g. KubernetesRuntime) extend it.
    def __init__(self, service_type: RuntimeType = RuntimeType.KUBERNETES):
        # The runtime flavor; RuntimeType currently defines only KUBERNETES.
        self.service_type = service_type
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class KubernetesRuntime(ServiceRuntime):
    """Kubernetes runtime configuration of a service: pod type, replicas,
    concurrency and optional autoscaler."""
    DEFAULT_POD_TYPE = InstanceCatalog.REGULAR_S
    DEFAULT_NUM_REPLICAS = 1
    DEFAULT_CONCURRENCY = 10

    def __init__(self,
                 pod_type: InstanceCatalog = DEFAULT_POD_TYPE,
                 num_replicas=DEFAULT_NUM_REPLICAS,
                 concurrency=DEFAULT_CONCURRENCY,
                 dynamic_concurrency=None,
                 runner_image=None,
                 autoscaler=None,
                 **kwargs):
        """
        :param InstanceCatalog pod_type: pod size catalog entry
        :param int num_replicas: number of replicas
        :param int concurrency: executions handled concurrently per replica
        :param dynamic_concurrency: dynamic concurrency configuration
        :param str runner_image: custom runner docker image
        :param autoscaler: autoscaler entity or platform dict
        :param kwargs: platform camelCase keys (podType/numReplicas/...) take
            precedence over the snake_case parameters, so the class can be
            constructed directly from platform json.
        """
        super().__init__(service_type=RuntimeType.KUBERNETES)
        self.pod_type = kwargs.get('podType', pod_type)
        self.num_replicas = kwargs.get('numReplicas', num_replicas)
        self.concurrency = kwargs.get('concurrency', concurrency)
        self.runner_image = kwargs.get('runnerImage', runner_image)
        self._proxy_image = kwargs.get('proxyImage', None)
        self.single_agent = kwargs.get('singleAgent', None)
        self.preemptible = kwargs.get('preemptible', None)
        self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)

        self.autoscaler = kwargs.get('autoscaler', autoscaler)
        if self.autoscaler is not None and isinstance(self.autoscaler, dict):
            # .get (not []) so a dict missing 'type' reports the clear
            # NotImplementedError below instead of a bare KeyError.
            autoscaler_type = self.autoscaler.get('type')
            if autoscaler_type == KubernetesAutoscalerType.RABBITMQ:
                self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
            elif autoscaler_type == KubernetesAutoscalerType.RPS:
                self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
            else:
                raise NotImplementedError(
                    'Unknown kubernetes autoscaler type: {}'.format(autoscaler_type))

    def to_json(self):
        """Return the platform json; optional fields are emitted only when set."""
        _json = {
            'podType': self.pod_type,
            'numReplicas': self.num_replicas,
            'concurrency': self.concurrency,
            'autoscaler': None if self.autoscaler is None else self.autoscaler.to_json()
        }

        if self.single_agent is not None:
            _json['singleAgent'] = self.single_agent

        if self.runner_image is not None:
            _json['runnerImage'] = self.runner_image

        if self._proxy_image is not None:
            _json['proxyImage'] = self._proxy_image

        if self.preemptible is not None:
            _json['preemptible'] = self.preemptible

        if self.dynamic_concurrency is not None:
            _json['dynamicConcurrency'] = self.dynamic_concurrency

        return _json
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
@attr.s
|
|
190
|
+
class Service(entities.BaseEntity):
|
|
191
|
+
"""
|
|
192
|
+
Service object
|
|
193
|
+
"""
|
|
194
|
+
# platform
|
|
195
|
+
created_at = attr.ib()
|
|
196
|
+
updated_at = attr.ib(repr=False)
|
|
197
|
+
creator = attr.ib()
|
|
198
|
+
version = attr.ib()
|
|
199
|
+
|
|
200
|
+
package_id = attr.ib()
|
|
201
|
+
package_revision = attr.ib()
|
|
202
|
+
|
|
203
|
+
bot = attr.ib()
|
|
204
|
+
use_user_jwt = attr.ib(repr=False)
|
|
205
|
+
init_input = attr.ib()
|
|
206
|
+
versions = attr.ib(repr=False)
|
|
207
|
+
module_name = attr.ib()
|
|
208
|
+
name = attr.ib()
|
|
209
|
+
url = attr.ib()
|
|
210
|
+
id = attr.ib()
|
|
211
|
+
active = attr.ib()
|
|
212
|
+
driver_id = attr.ib(repr=False)
|
|
213
|
+
secrets = attr.ib(repr=False)
|
|
214
|
+
|
|
215
|
+
# name change
|
|
216
|
+
runtime = attr.ib(repr=False, type=KubernetesRuntime)
|
|
217
|
+
queue_length_limit = attr.ib()
|
|
218
|
+
run_execution_as_process = attr.ib(type=bool)
|
|
219
|
+
execution_timeout = attr.ib()
|
|
220
|
+
drain_time = attr.ib()
|
|
221
|
+
on_reset = attr.ib(type=OnResetAction)
|
|
222
|
+
_type = attr.ib(type=ServiceType)
|
|
223
|
+
project_id = attr.ib()
|
|
224
|
+
org_id = attr.ib()
|
|
225
|
+
is_global = attr.ib()
|
|
226
|
+
max_attempts = attr.ib()
|
|
227
|
+
mode = attr.ib(repr=False)
|
|
228
|
+
metadata = attr.ib()
|
|
229
|
+
archive = attr.ib(repr=False)
|
|
230
|
+
config = attr.ib(repr=False)
|
|
231
|
+
settings = attr.ib(repr=False)
|
|
232
|
+
panels = attr.ib(repr=False)
|
|
233
|
+
|
|
234
|
+
# SDK
|
|
235
|
+
_package = attr.ib(repr=False)
|
|
236
|
+
_client_api = attr.ib(type=ApiClient, repr=False)
|
|
237
|
+
_revisions = attr.ib(default=None, repr=False)
|
|
238
|
+
# repositories
|
|
239
|
+
_project = attr.ib(default=None, repr=False)
|
|
240
|
+
_repositories = attr.ib(repr=False)
|
|
241
|
+
updated_by = attr.ib(default=None)
|
|
242
|
+
app = attr.ib(default=None)
|
|
243
|
+
integrations = attr.ib(default=None)
|
|
244
|
+
|
|
245
|
+
@property
def createdAt(self):
    # camelCase alias for `created_at` (platform json field name).
    return self.created_at
|
|
248
|
+
|
|
249
|
+
@property
def updatedAt(self):
    # camelCase alias for `updated_at` (platform json field name).
    return self.updated_at
|
|
252
|
+
|
|
253
|
+
@staticmethod
def _protected_from_json(_json: dict, client_api: ApiClient, package=None, project=None, is_fetched=True):
    """
    Same as from_json but with try-except to catch any error.

    :param dict _json: platform json
    :param ApiClient client_api: ApiClient entity
    :param package: package entity
    :param project: project entity
    :param bool is_fetched: is Entity fetched from Platform
    :return: (status, result) tuple — on success (True, Service);
        on failure (False, traceback string)
    """
    try:
        service = Service.from_json(_json=_json,
                                    client_api=client_api,
                                    package=package,
                                    project=project,
                                    is_fetched=is_fetched)
        status = True
    except Exception:
        # Return the traceback text instead of raising, so bulk-parsing
        # callers can collect per-item failures.
        service = traceback.format_exc()
        status = False
    return status, service
|
|
276
|
+
|
|
277
|
+
@classmethod
def from_json(cls, _json: dict, client_api: ApiClient = None, package=None, project=None, is_fetched=True):
    """
    Build a service entity object from a json.

    :param dict _json: platform json
    :param dl.ApiClient client_api: ApiClient entity
    :param dtlpy.entities.package.Package package: package entity
    :param dtlpy.entities.project.Project project: project entity
    :param bool is_fetched: is Entity fetched from Platform
    :return: service object
    :rtype: dtlpy.entities.service.Service
    """
    # Drop mismatched context entities rather than attaching the wrong ones;
    # they will be lazily re-fetched from the ids in the json.
    if project is not None:
        if project.id != _json.get('projectId', None):
            logger.warning('Service has been fetched from a project that is not belong to it')
            project = None

    if package is not None:
        if package.id != _json.get('packageId', None):
            logger.warning('Service has been fetched from a package that is not belong to it')
            package = None

    versions = _json.get('versions', dict())
    runtime = _json.get("runtime", None)
    if runtime:
        # Inflate the raw runtime dict into a KubernetesRuntime entity.
        runtime = KubernetesRuntime(**runtime)

    inst = cls(
        package_revision=_json.get("packageRevision", None),
        bot=_json.get("botUserName", None),
        use_user_jwt=_json.get("useUserJwt", False),
        created_at=_json.get("createdAt", None),
        updated_at=_json.get("updatedAt", None),
        project_id=_json.get('projectId', None),
        package_id=_json.get('packageId', None),
        driver_id=_json.get('driverId', None),
        max_attempts=_json.get('maxAttempts', None),
        version=_json.get('version', None),
        creator=_json.get('creator', None),
        revisions=_json.get('revisions', None),
        queue_length_limit=_json.get('queueLengthLimit', None),
        active=_json.get('active', None),
        runtime=runtime,
        is_global=_json.get("global", False),
        init_input=_json.get("initParams", dict()),
        module_name=_json.get("moduleName", None),
        run_execution_as_process=_json.get('runExecutionAsProcess', False),
        execution_timeout=_json.get('executionTimeout', 60 * 60),
        drain_time=_json.get('drainTime', 60 * 10),
        on_reset=_json.get('onReset', OnResetAction.FAILED),
        name=_json.get("name", None),
        url=_json.get("url", None),
        id=_json.get("id", None),
        versions=versions,
        client_api=client_api,
        package=package,
        project=project,
        secrets=_json.get("secrets", None),
        type=_json.get("type", None),
        mode=_json.get('mode', dict()),
        metadata=_json.get('metadata', None),
        archive=_json.get('archive', None),
        updated_by=_json.get('updatedBy', None),
        config=_json.get('config', None),
        settings=_json.get('settings', None),
        app=_json.get('app', None),
        integrations=_json.get('integrations', None),
        org_id=_json.get('orgId', None),
        panels=_json.get('panels', None)
    )
    inst.is_fetched = is_fetched
    return inst
|
|
350
|
+
|
|
351
|
+
############
|
|
352
|
+
# Entities #
|
|
353
|
+
############
|
|
354
|
+
@property
def revisions(self):
    # Lazily fetch and cache the service revisions list.
    if self._revisions is None:
        self._revisions = self.services.revisions(service=self)
    return self._revisions
|
|
359
|
+
|
|
360
|
+
@property
def platform_url(self):
    # Web-platform URL of this service's main page.
    # NOTE: accessing self.project may trigger a lazy project fetch.
    return self._client_api._get_resource_url("projects/{}/services/{}/main".format(self.project.id, self.id))
|
|
363
|
+
|
|
364
|
+
@property
def project(self):
    # Lazily fetch and cache the owning Project entity by project_id.
    if self._project is None:
        self._project = repositories.Projects(client_api=self._client_api).get(project_id=self.project_id,
                                                                               fetch=None)
    assert isinstance(self._project, entities.Project)
    return self._project
|
|
371
|
+
|
|
372
|
+
@property
def package(self):
    """Package/DPK entity this service was deployed from (lazily fetched
    and cached). Tries the DPK (apps) API first; falls back to the legacy
    packages API when that lookup fails."""
    if self._package is None:
        try:
            dpk_id = None
            dpk_version = None
            # When deployed via an app, the app dict carries the dpk coordinates.
            if self.app and isinstance(self.app, dict):
                dpk_id = self.app.get('dpkId', None)
                dpk_version = self.app.get('dpkVersion', None)
            if dpk_id is None:
                self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
                    dpk_id=self.package_id)
            else:
                self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
                    dpk_id=dpk_id,
                    version=dpk_version)

            assert isinstance(self._package, entities.Dpk)
        except Exception:
            # was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception so those propagate.
            self._package = repositories.Packages(client_api=self._client_api).get(package_id=self.package_id,
                                                                                   fetch=None,
                                                                                   log_error=False)
            assert isinstance(self._package, entities.Package)
    return self._package
|
|
396
|
+
|
|
397
|
+
@property
def execution_url(self):
    # Human-readable curl template showing how to trigger an execution over
    # HTTP; the <...> placeholders are meant to be filled in by the user.
    return 'CURL -X POST' \
           '\nauthorization: Bearer <token>' \
           '\nContent-Type: application/json" -d {' \
           '\n"input": {<input json>}, ' \
           '"projectId": "{<project_id>}", ' \
           '"functionName": "<function_name>"}'
|
|
405
|
+
|
|
406
|
+
################
|
|
407
|
+
# repositories #
|
|
408
|
+
################
|
|
409
|
+
@_repositories.default
def set_repositories(self):
    # attr default factory: builds the namedtuple of repositories
    # (executions, services, triggers) scoped to this service.
    reps = namedtuple('repositories',
                      field_names=['executions', 'services', 'triggers'])

    # Reuse the package's services repository when a package is attached,
    # so its context (package/project) is preserved.
    if self._package is None:
        services_repo = repositories.Services(client_api=self._client_api,
                                              package=self._package,
                                              project=self._project)
    else:
        services_repo = self._package.services

    triggers = repositories.Triggers(client_api=self._client_api,
                                     project=self._project,
                                     service=self)

    r = reps(executions=repositories.Executions(client_api=self._client_api, service=self),
             services=services_repo, triggers=triggers)
    return r
|
|
428
|
+
|
|
429
|
+
@property
def executions(self):
    """Executions repository scoped to this service."""
    repo = self._repositories.executions
    assert isinstance(repo, repositories.Executions)
    return repo
|
|
433
|
+
|
|
434
|
+
@property
def triggers(self):
    """Triggers repository scoped to this service."""
    repo = self._repositories.triggers
    assert isinstance(repo, repositories.Triggers)
    return repo
|
|
438
|
+
|
|
439
|
+
@property
def services(self):
    """Services repository for this service's package/project context."""
    repo = self._repositories.services
    assert isinstance(repo, repositories.Services)
    return repo
|
|
443
|
+
|
|
444
|
+
###########
|
|
445
|
+
# methods #
|
|
446
|
+
###########
|
|
447
|
+
def to_json(self):
|
|
448
|
+
"""
|
|
449
|
+
Returns platform _json format of object
|
|
450
|
+
|
|
451
|
+
:return: platform json format of object
|
|
452
|
+
:rtype: dict
|
|
453
|
+
"""
|
|
454
|
+
_json = attr.asdict(
|
|
455
|
+
self,
|
|
456
|
+
filter=attr.filters.exclude(
|
|
457
|
+
attr.fields(Service)._project,
|
|
458
|
+
attr.fields(Service)._package,
|
|
459
|
+
attr.fields(Service)._revisions,
|
|
460
|
+
attr.fields(Service)._client_api,
|
|
461
|
+
attr.fields(Service)._repositories,
|
|
462
|
+
attr.fields(Service).project_id,
|
|
463
|
+
attr.fields(Service).init_input,
|
|
464
|
+
attr.fields(Service).module_name,
|
|
465
|
+
attr.fields(Service).bot,
|
|
466
|
+
attr.fields(Service).package_id,
|
|
467
|
+
attr.fields(Service).is_global,
|
|
468
|
+
attr.fields(Service).use_user_jwt,
|
|
469
|
+
attr.fields(Service).package_revision,
|
|
470
|
+
attr.fields(Service).driver_id,
|
|
471
|
+
attr.fields(Service).run_execution_as_process,
|
|
472
|
+
attr.fields(Service).execution_timeout,
|
|
473
|
+
attr.fields(Service).drain_time,
|
|
474
|
+
attr.fields(Service).runtime,
|
|
475
|
+
attr.fields(Service).queue_length_limit,
|
|
476
|
+
attr.fields(Service).max_attempts,
|
|
477
|
+
attr.fields(Service).on_reset,
|
|
478
|
+
attr.fields(Service).created_at,
|
|
479
|
+
attr.fields(Service).updated_at,
|
|
480
|
+
attr.fields(Service).secrets,
|
|
481
|
+
attr.fields(Service)._type,
|
|
482
|
+
attr.fields(Service).mode,
|
|
483
|
+
attr.fields(Service).metadata,
|
|
484
|
+
attr.fields(Service).archive,
|
|
485
|
+
attr.fields(Service).updated_by,
|
|
486
|
+
attr.fields(Service).config,
|
|
487
|
+
attr.fields(Service).settings,
|
|
488
|
+
attr.fields(Service).app,
|
|
489
|
+
attr.fields(Service).integrations,
|
|
490
|
+
attr.fields(Service).org_id,
|
|
491
|
+
attr.fields(Service).panels
|
|
492
|
+
)
|
|
493
|
+
)
|
|
494
|
+
|
|
495
|
+
_json['projectId'] = self.project_id
|
|
496
|
+
_json['orgId'] = self.org_id
|
|
497
|
+
_json['packageId'] = self.package_id
|
|
498
|
+
_json['initParams'] = self.init_input
|
|
499
|
+
_json['moduleName'] = self.module_name
|
|
500
|
+
_json['botUserName'] = self.bot
|
|
501
|
+
_json['useUserJwt'] = self.use_user_jwt
|
|
502
|
+
_json['global'] = self.is_global
|
|
503
|
+
_json['driverId'] = self.driver_id
|
|
504
|
+
_json['packageRevision'] = self.package_revision
|
|
505
|
+
_json['runExecutionAsProcess'] = self.run_execution_as_process
|
|
506
|
+
_json['executionTimeout'] = self.execution_timeout
|
|
507
|
+
_json['drainTime'] = self.drain_time
|
|
508
|
+
_json['onReset'] = self.on_reset
|
|
509
|
+
_json['createdAt'] = self.created_at
|
|
510
|
+
_json['updatedAt'] = self.updated_at
|
|
511
|
+
|
|
512
|
+
if self.updated_by is not None:
|
|
513
|
+
_json['updatedBy'] = self.updated_by
|
|
514
|
+
|
|
515
|
+
if self.panels is not None:
|
|
516
|
+
_json['panels'] = self.panels
|
|
517
|
+
|
|
518
|
+
if self.max_attempts is not None:
|
|
519
|
+
_json['maxAttempts'] = self.max_attempts
|
|
520
|
+
|
|
521
|
+
if self.is_global is not None:
|
|
522
|
+
_json['global'] = self.is_global
|
|
523
|
+
|
|
524
|
+
if self.runtime:
|
|
525
|
+
_json['runtime'] = self.runtime if isinstance(self.runtime, dict) else self.runtime.to_json()
|
|
526
|
+
|
|
527
|
+
if self.queue_length_limit is not None:
|
|
528
|
+
_json['queueLengthLimit'] = self.queue_length_limit
|
|
529
|
+
|
|
530
|
+
if self.secrets is not None:
|
|
531
|
+
_json['secrets'] = self.secrets
|
|
532
|
+
|
|
533
|
+
if self._type is not None:
|
|
534
|
+
_json['type'] = self._type
|
|
535
|
+
|
|
536
|
+
if self.mode:
|
|
537
|
+
_json['mode'] = self.mode
|
|
538
|
+
|
|
539
|
+
if self.metadata:
|
|
540
|
+
_json['metadata'] = self.metadata
|
|
541
|
+
|
|
542
|
+
if self.archive is not None:
|
|
543
|
+
_json['archive'] = self.archive
|
|
544
|
+
|
|
545
|
+
if self.config is not None:
|
|
546
|
+
_json['config'] = self.config
|
|
547
|
+
|
|
548
|
+
if self.settings is not None:
|
|
549
|
+
_json['settings'] = self.settings
|
|
550
|
+
|
|
551
|
+
if self.app is not None:
|
|
552
|
+
_json['app'] = self.app
|
|
553
|
+
|
|
554
|
+
if self.integrations is not None:
|
|
555
|
+
_json['integrations'] = self.integrations
|
|
556
|
+
|
|
557
|
+
return _json
|
|
558
|
+
|
|
559
|
+
def update(self, force=False):
|
|
560
|
+
"""
|
|
561
|
+
Update Service changes to platform
|
|
562
|
+
|
|
563
|
+
:param bool force: force update
|
|
564
|
+
:return: Service entity
|
|
565
|
+
:rtype: dtlpy.entities.service.Service
|
|
566
|
+
"""
|
|
567
|
+
return self.services.update(service=self, force=force)
|
|
568
|
+
|
|
569
|
+
def delete(self, force: bool = False):
|
|
570
|
+
"""
|
|
571
|
+
Delete Service object
|
|
572
|
+
|
|
573
|
+
:return: True
|
|
574
|
+
:rtype: bool
|
|
575
|
+
"""
|
|
576
|
+
return self.services.delete(service_id=self.id, force=force)
|
|
577
|
+
|
|
578
|
+
def status(self):
|
|
579
|
+
"""
|
|
580
|
+
Get Service status
|
|
581
|
+
|
|
582
|
+
:return: status json
|
|
583
|
+
:rtype: dict
|
|
584
|
+
"""
|
|
585
|
+
return self.services.status(service_id=self.id)
|
|
586
|
+
|
|
587
|
+
def log(self,
|
|
588
|
+
size=None,
|
|
589
|
+
checkpoint=None,
|
|
590
|
+
start=None,
|
|
591
|
+
end=None,
|
|
592
|
+
follow=False,
|
|
593
|
+
text=None,
|
|
594
|
+
execution_id=None,
|
|
595
|
+
function_name=None,
|
|
596
|
+
replica_id=None,
|
|
597
|
+
system=False,
|
|
598
|
+
view=True,
|
|
599
|
+
until_completed=True,
|
|
600
|
+
model_id: str = None,
|
|
601
|
+
model_operation: str = None,
|
|
602
|
+
):
|
|
603
|
+
"""
|
|
604
|
+
Get service logs
|
|
605
|
+
|
|
606
|
+
:param int size: size
|
|
607
|
+
:param dict checkpoint: the information from the lst point checked in the service
|
|
608
|
+
:param str start: iso format time
|
|
609
|
+
:param str end: iso format time
|
|
610
|
+
:param bool follow: if true, keep stream future logs
|
|
611
|
+
:param str text: text
|
|
612
|
+
:param str execution_id: execution id
|
|
613
|
+
:param str function_name: function name
|
|
614
|
+
:param str replica_id: replica id
|
|
615
|
+
:param bool system: system
|
|
616
|
+
:param bool view: if true, print out all the logs
|
|
617
|
+
:param bool until_completed: wait until completed
|
|
618
|
+
:param str model_id: model id
|
|
619
|
+
:param str model_operation: model operation action
|
|
620
|
+
:return: ServiceLog entity
|
|
621
|
+
:rtype: ServiceLog
|
|
622
|
+
|
|
623
|
+
**Example**:
|
|
624
|
+
|
|
625
|
+
.. code-block:: python
|
|
626
|
+
|
|
627
|
+
service_log = service.log()
|
|
628
|
+
"""
|
|
629
|
+
return self.services.log(service=self,
|
|
630
|
+
size=size,
|
|
631
|
+
checkpoint=checkpoint,
|
|
632
|
+
start=start,
|
|
633
|
+
end=end,
|
|
634
|
+
follow=follow,
|
|
635
|
+
execution_id=execution_id,
|
|
636
|
+
function_name=function_name,
|
|
637
|
+
replica_id=replica_id,
|
|
638
|
+
system=system,
|
|
639
|
+
text=text,
|
|
640
|
+
view=view,
|
|
641
|
+
until_completed=until_completed,
|
|
642
|
+
model_id=model_id,
|
|
643
|
+
model_operation=model_operation)
|
|
644
|
+
|
|
645
|
+
def open_in_web(self):
|
|
646
|
+
"""
|
|
647
|
+
Open the service in web platform
|
|
648
|
+
|
|
649
|
+
:return:
|
|
650
|
+
"""
|
|
651
|
+
parsed_url = urlsplit(self.platform_url)
|
|
652
|
+
base_url = parsed_url.scheme + "://" + parsed_url.netloc
|
|
653
|
+
url = '{}/projects/{}/services/{}'.format(base_url, self.project_id, self.id)
|
|
654
|
+
self._client_api._open_in_web(url=url)
|
|
655
|
+
|
|
656
|
+
def checkout(self):
|
|
657
|
+
"""
|
|
658
|
+
Checkout
|
|
659
|
+
|
|
660
|
+
:return:
|
|
661
|
+
"""
|
|
662
|
+
return self.services.checkout(service=self)
|
|
663
|
+
|
|
664
|
+
def pause(self):
|
|
665
|
+
"""
|
|
666
|
+
pause
|
|
667
|
+
|
|
668
|
+
:return:
|
|
669
|
+
"""
|
|
670
|
+
return self.services.pause(service_id=self.id)
|
|
671
|
+
|
|
672
|
+
def resume(self):
|
|
673
|
+
"""
|
|
674
|
+
resume
|
|
675
|
+
|
|
676
|
+
:return:
|
|
677
|
+
"""
|
|
678
|
+
return self.services.resume(service_id=self.id)
|
|
679
|
+
|
|
680
|
+
def execute(
|
|
681
|
+
self,
|
|
682
|
+
execution_input=None,
|
|
683
|
+
function_name=None,
|
|
684
|
+
resource=None,
|
|
685
|
+
item_id=None,
|
|
686
|
+
dataset_id=None,
|
|
687
|
+
annotation_id=None,
|
|
688
|
+
project_id=None,
|
|
689
|
+
sync=False,
|
|
690
|
+
stream_logs=True,
|
|
691
|
+
return_output=True
|
|
692
|
+
):
|
|
693
|
+
"""
|
|
694
|
+
Execute a function on an existing service
|
|
695
|
+
|
|
696
|
+
:param List[FunctionIO] or dict execution_input: input dictionary or list of FunctionIO entities
|
|
697
|
+
:param str function_name: function name to run
|
|
698
|
+
:param str resource: input type.
|
|
699
|
+
:param str item_id: optional - item id as input to function
|
|
700
|
+
:param str dataset_id: optional - dataset id as input to function
|
|
701
|
+
:param str annotation_id: optional - annotation id as input to function
|
|
702
|
+
:param str project_id: resource's project
|
|
703
|
+
:param bool sync: if true, wait for function to end
|
|
704
|
+
:param bool stream_logs: prints logs of the new execution. only works with sync=True
|
|
705
|
+
:param bool return_output: if True and sync is True - will return the output directly
|
|
706
|
+
:return: execution object
|
|
707
|
+
:rtype: dtlpy.entities.execution.Execution
|
|
708
|
+
|
|
709
|
+
**Example**:
|
|
710
|
+
|
|
711
|
+
.. code-block:: python
|
|
712
|
+
|
|
713
|
+
execution = service.execute(function_name='function_name', item_id='item_id', project_id='project_id')
|
|
714
|
+
"""
|
|
715
|
+
execution = self.executions.create(sync=sync,
|
|
716
|
+
execution_input=execution_input,
|
|
717
|
+
function_name=function_name,
|
|
718
|
+
resource=resource,
|
|
719
|
+
item_id=item_id,
|
|
720
|
+
dataset_id=dataset_id,
|
|
721
|
+
annotation_id=annotation_id,
|
|
722
|
+
stream_logs=stream_logs,
|
|
723
|
+
project_id=project_id,
|
|
724
|
+
return_output=return_output)
|
|
725
|
+
return execution
|
|
726
|
+
|
|
727
|
+
def execute_batch(self,
|
|
728
|
+
filters,
|
|
729
|
+
function_name: str = None,
|
|
730
|
+
execution_inputs: list = None,
|
|
731
|
+
wait=True
|
|
732
|
+
):
|
|
733
|
+
"""
|
|
734
|
+
Execute a function on an existing service
|
|
735
|
+
|
|
736
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
|
|
737
|
+
|
|
738
|
+
:param filters: Filters entity for a filtering before execute
|
|
739
|
+
:param str function_name: function name to run
|
|
740
|
+
:param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities, that represent the extra inputs of the function
|
|
741
|
+
:param bool wait: wait until create task finish
|
|
742
|
+
:return: execution object
|
|
743
|
+
:rtype: dtlpy.entities.execution.Execution
|
|
744
|
+
|
|
745
|
+
**Example**:
|
|
746
|
+
|
|
747
|
+
.. code-block:: python
|
|
748
|
+
|
|
749
|
+
command = service.execute_batch(
|
|
750
|
+
execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
|
|
751
|
+
filters=dl.Filters(field='dir', values='/test', context={"datasets": [dataset.id]),
|
|
752
|
+
function_name='run')
|
|
753
|
+
"""
|
|
754
|
+
execution = self.executions.create_batch(service_id=self.id,
|
|
755
|
+
execution_inputs=execution_inputs,
|
|
756
|
+
filters=filters,
|
|
757
|
+
function_name=function_name,
|
|
758
|
+
wait=wait)
|
|
759
|
+
return execution
|
|
760
|
+
|
|
761
|
+
def rerun_batch(self,
|
|
762
|
+
filters,
|
|
763
|
+
wait=True
|
|
764
|
+
):
|
|
765
|
+
"""
|
|
766
|
+
rerun a executions on an existing service
|
|
767
|
+
|
|
768
|
+
**Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
|
|
769
|
+
|
|
770
|
+
:param filters: Filters entity for a filtering before rerun
|
|
771
|
+
:param bool wait: wait until create task finish
|
|
772
|
+
:return: rerun command
|
|
773
|
+
:rtype: dtlpy.entities.command.Command
|
|
774
|
+
|
|
775
|
+
**Example**:
|
|
776
|
+
|
|
777
|
+
.. code-block:: python
|
|
778
|
+
|
|
779
|
+
command = service.executions.rerun_batch(
|
|
780
|
+
filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
|
|
781
|
+
"""
|
|
782
|
+
execution = self.executions.rerun_batch(service_id=self.id,
|
|
783
|
+
filters=filters,
|
|
784
|
+
wait=wait)
|
|
785
|
+
return execution
|
|
786
|
+
|
|
787
|
+
def activate_slots(
|
|
788
|
+
self,
|
|
789
|
+
project_id: str = None,
|
|
790
|
+
task_id: str = None,
|
|
791
|
+
dataset_id: str = None,
|
|
792
|
+
org_id: str = None,
|
|
793
|
+
user_email: str = None,
|
|
794
|
+
slots=None,
|
|
795
|
+
role=None,
|
|
796
|
+
prevent_override: bool = True,
|
|
797
|
+
visible: bool = True,
|
|
798
|
+
icon: str = 'fas fa-magic',
|
|
799
|
+
**kwargs
|
|
800
|
+
) -> object:
|
|
801
|
+
"""
|
|
802
|
+
Activate service slots
|
|
803
|
+
|
|
804
|
+
:param str project_id: project id
|
|
805
|
+
:param str task_id: task id
|
|
806
|
+
:param str dataset_id: dataset id
|
|
807
|
+
:param str org_id: org id
|
|
808
|
+
:param str user_email: user email
|
|
809
|
+
:param list slots: list of entities.PackageSlot
|
|
810
|
+
:param str role: user role MemberOrgRole.ADMIN, MemberOrgRole.owner, MemberOrgRole.MEMBER, MemberOrgRole.WORKER
|
|
811
|
+
:param bool prevent_override: True to prevent override
|
|
812
|
+
:param bool visible: visible
|
|
813
|
+
:param str icon: icon
|
|
814
|
+
:param kwargs: all additional arguments
|
|
815
|
+
:return: list of user setting for activated slots
|
|
816
|
+
:rtype: list
|
|
817
|
+
|
|
818
|
+
**Example**:
|
|
819
|
+
|
|
820
|
+
.. code-block:: python
|
|
821
|
+
|
|
822
|
+
setting = service.activate_slots(project_id='project_id',
|
|
823
|
+
slots=List[entities.PackageSlot],
|
|
824
|
+
icon='fas fa-magic')
|
|
825
|
+
"""
|
|
826
|
+
return self.services.activate_slots(
|
|
827
|
+
service=self,
|
|
828
|
+
project_id=project_id,
|
|
829
|
+
task_id=task_id,
|
|
830
|
+
dataset_id=dataset_id,
|
|
831
|
+
org_id=org_id,
|
|
832
|
+
user_email=user_email,
|
|
833
|
+
slots=slots,
|
|
834
|
+
role=role,
|
|
835
|
+
prevent_override=prevent_override,
|
|
836
|
+
visible=visible,
|
|
837
|
+
icon=icon,
|
|
838
|
+
**kwargs
|
|
839
|
+
)
|
|
840
|
+
|
|
841
|
+
def restart(self, replica_name: str = None):
|
|
842
|
+
"""
|
|
843
|
+
Restart service
|
|
844
|
+
|
|
845
|
+
:param str replica_name: replica name
|
|
846
|
+
:return: True
|
|
847
|
+
:rtype: bool
|
|
848
|
+
"""
|
|
849
|
+
return self.services.restart(service=self, replica_name=replica_name)
|
|
850
|
+
|
|
851
|
+
|
|
852
|
+
class KubernetesAutoscalerType(str, Enum):
    """Supported service autoscaler strategies (RABBITMQ, CPU, RPS).

    .. list-table::
       :widths: 15 150
       :header-rows: 1

       * - State
         - Description
       * - RABBITMQ
         - Scale replicas based on the service's queue length
       * - CPU
         - Scale replicas based on the service's CPU usage
       * - RPS
         - Scale replicas based on the service's requests per second
    """
    RABBITMQ = 'rabbitmq'
    CPU = 'cpu'
    RPS = 'rps'
|
|
872
|
+
|
|
873
|
+
# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
class KubernetesAutuscalerTypeMeta(type):
    """Metaclass that forwards all attribute access to KubernetesAutoscalerType.

    Keeps the old misspelled alias working by delegating every lookup to the
    correctly-spelled enum.
    """

    def __getattribute__(cls, item):
        # guard clause: anything the real enum does not define is an error
        if not hasattr(KubernetesAutoscalerType, item):
            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
        return getattr(KubernetesAutoscalerType, item)
|
|
881
|
+
|
|
882
|
+
class KubernetesAutoscaler(entities.BaseEntity):
    """Base Kubernetes autoscaler configuration for a service runtime."""
    # defaults used when the caller does not specify replica bounds or a type
    MIN_REPLICA_DEFAULT = 0
    MAX_REPLICA_DEFAULT = 1
    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ

    def __init__(self,
                 autoscaler_type: KubernetesAutoscalerType = AUTOSCALER_TYPE_DEFAULT,
                 min_replicas: int = MIN_REPLICA_DEFAULT,
                 max_replicas: int = MAX_REPLICA_DEFAULT,
                 cooldown_period: int = None,
                 polling_interval: int = None,
                 **kwargs):
        """Build the autoscaler; camelCase keys in ``kwargs`` (as produced by
        :meth:`to_json` / the platform) take precedence over the pythonic
        keyword arguments, enabling a JSON round-trip via ``**json_dict``.
        """
        self.autoscaler_type = kwargs.get('type', autoscaler_type)
        self.min_replicas = kwargs.get('minReplicas', min_replicas)
        self.max_replicas = kwargs.get('maxReplicas', max_replicas)
        self.cooldown_period = kwargs.get('cooldownPeriod', cooldown_period)
        self.polling_interval = kwargs.get('pollingInterval', polling_interval)

    def to_json(self):
        """Return the platform (camelCase) JSON representation.

        ``cooldownPeriod`` / ``pollingInterval`` are emitted only when set.

        :rtype: dict
        """
        _json = {
            'type': self.autoscaler_type,
            'minReplicas': self.min_replicas,
            'maxReplicas': self.max_replicas
        }

        if self.cooldown_period is not None:
            _json['cooldownPeriod'] = self.cooldown_period

        if self.polling_interval is not None:
            _json['pollingInterval'] = self.polling_interval

        return _json
|
|
915
|
+
|
|
916
|
+
class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
    """Queue-length (RabbitMQ) based autoscaler configuration."""
    QUEUE_LENGTH_DEFAULT = 1000

    def __init__(self,
                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
                 queue_length=QUEUE_LENGTH_DEFAULT,
                 cooldown_period=None,
                 polling_interval=None,
                 **kwargs):
        super().__init__(autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
                         min_replicas=min_replicas,
                         max_replicas=max_replicas,
                         cooldown_period=cooldown_period,
                         polling_interval=polling_interval,
                         **kwargs)
        # camelCase key from platform JSON wins over the pythonic argument
        self.queue_length = kwargs.get('queueLength', queue_length)

    def to_json(self):
        """Return the base JSON extended with the queue-length trigger."""
        payload = super().to_json()
        payload['queueLength'] = self.queue_length
        return payload
|
|
938
|
+
|
|
939
|
+
class KubernetesRPSAutoscaler(KubernetesAutoscaler):
    """Requests-per-second based autoscaler configuration."""
    THRESHOLD_DEFAULT = 10
    RATE_SECONDS_DEFAULT = 30

    def __init__(self,
                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
                 threshold=THRESHOLD_DEFAULT,
                 rate_seconds=RATE_SECONDS_DEFAULT,
                 cooldown_period=None,
                 polling_interval=None,
                 **kwargs):
        super().__init__(autoscaler_type=KubernetesAutoscalerType.RPS,
                         min_replicas=min_replicas,
                         max_replicas=max_replicas,
                         cooldown_period=cooldown_period,
                         polling_interval=polling_interval,
                         **kwargs)
        # camelCase keys from platform JSON win over the pythonic arguments
        self.threshold = kwargs.get('threshold', threshold)
        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)

    def to_json(self):
        """Return the base JSON extended with the RPS trigger settings."""
        payload = super().to_json()
        payload['rateSeconds'] = self.rate_seconds
        payload['threshold'] = self.threshold
        return payload
|