dtlpy 1.115.44__py3-none-any.whl → 1.117.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +152 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +975 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +974 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1287 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1585 -1504
- dtlpy/repositories/downloader.py +1157 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +256 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +429 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1786 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp +1 -1
- dtlpy-1.117.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.117.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/METADATA +186 -186
- dtlpy-1.117.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.117.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/pipeline_execution.py
@@ -1,279 +1,279 @@
from collections import namedtuple
import logging
import traceback
from enum import Enum

import attr

from .. import repositories, entities
from ..services.api_client import ApiClient

logger = logging.getLogger(name='dtlpy')


class PipelineExecutionStatus(str, Enum):
    PENDING = "pending"
    IN_PROGRESS = "in-progress"
    FAILED = "failed"
    SUCCESS = "success"
    QUEUE = "queue"
    TERMINATED = "terminated"
    RERUN = "rerun"


class CycleRerunMethod(str, Enum):
    START_FROM_NODES = 'startFromNodes',
    START_FROM_FAILED_EXECUTIONS = 'startFromFailedExecutions',
    START_FROM_BEGINNING = 'startFromBeginning'


class PipelineExecutionNode:
    def __init__(self, name, node_id, ports, metadata, node_type, namespace, project_id, status):
        self.node_id = node_id
        self.namespace = namespace
        self.node_type = node_type
        self.status = status
        self.ports = ports
        self.metadata = metadata
        self.project_id = project_id
        self.name = name

    @staticmethod
    def from_json(_json: dict):
        ports = [entities.PipelineNodeIO.from_json(_json=i_input) for i_input in _json.get('ports', list())]
        return PipelineExecutionNode(
            node_id=_json.get('id', None),
            namespace=_json.get('namespace', None),
            node_type=_json.get('type', None),
            status=_json.get('status', None),
            ports=ports,
            metadata=_json.get('metadata', None),
            project_id=_json.get('projectId', None),
            name=_json.get('name', None),
        )

    def to_json(self):
        _json = {
            'id': self.node_id,
            'namespace': self.namespace,
            'type': self.node_type,
            'status': self.status,
            'ports': [_io.to_json() for _io in self.ports],
            'metadata': self.metadata,
            'projectId': self.project_id,
            'name': self.name,
        }

        return _json


@attr.s
class PipelineExecution(entities.BaseEntity):
    """
    Package object
    """
    # platform
    id = attr.ib()
    nodes = attr.ib(repr=False)
    executions = attr.ib(repr=False)
    status = attr.ib()
    # name change
    created_at = attr.ib()
    updated_at = attr.ib(repr=False)
    pipeline_id = attr.ib()
    max_attempts = attr.ib()
    creator = attr.ib()

    # sdk
    _pipeline = attr.ib(repr=False)
    _project = attr.ib(repr=False)
    _client_api = attr.ib(type=ApiClient, repr=False)
    _repositories = attr.ib(repr=False)

    @staticmethod
    def _protected_from_json(_json, client_api, pipeline=None, is_fetched=True):
        """
        Same as from_json but with try-except to catch if error
        :param _json: platform json
        :param client_api: ApiClient entity
        :param pipeline: Pipeline entity
        :param is_fetched: is Entity fetched from Platform
        :return:
        """
        try:
            pipeline = PipelineExecution.from_json(
                _json=_json,
                client_api=client_api,
                pipeline=pipeline,
                is_fetched=is_fetched
            )
            status = True
        except Exception:
            pipeline = traceback.format_exc()
            status = False
        return status, pipeline

    @classmethod
    def from_json(cls, _json, client_api, pipeline=None, is_fetched=True) -> 'PipelineExecution':
        """
        Turn platform representation of pipeline_execution into a pipeline_execution entity

        :param dict _json: platform representation of package
        :param dl.ApiClient client_api: ApiClient entity
        :param dtlpy.entities.pipeline.Pipeline pipeline: Pipeline entity
        :param bool is_fetched: is Entity fetched from Platform
        :return: Pipeline entity
        :rtype: dtlpy.entities.PipelineExecution
        """
        project = None
        if pipeline is not None:
            project = pipeline._project
            if pipeline.id != _json.get('pipelineId', None):
                logger.warning('Pipeline has been fetched from a project that is not belong to it')
                pipeline = None

        nodes = [PipelineExecutionNode.from_json(_json=node) for node in _json.get('nodes', list())]

        inst = cls(
            id=_json.get('id', None),
            created_at=_json.get('createdAt', None),
            updated_at=_json.get('updatedAt', None),
            pipeline_id=_json.get('pipelineId', None),
            status=_json.get('status', None),
            max_attempts=_json.get('maxAttempts', None),
            creator=_json.get('creator', None),
            nodes=nodes,
            executions=_json.get('executions', dict()),
            pipeline=pipeline,
            project=project,
            client_api=client_api,
        )

        inst.is_fetched = is_fetched
        return inst

    def to_json(self):
        """
        Turn Package entity into a platform representation of Package

        :return: platform json of package
        :rtype: dict
        """
        _json = attr.asdict(self,
                            filter=attr.filters.exclude(attr.fields(PipelineExecution)._repositories,
                                                        attr.fields(PipelineExecution)._client_api,
                                                        attr.fields(PipelineExecution)._pipeline,
                                                        attr.fields(PipelineExecution).nodes,
                                                        attr.fields(PipelineExecution).created_at,
                                                        attr.fields(PipelineExecution).updated_at,
                                                        attr.fields(PipelineExecution).pipeline_id,
                                                        attr.fields(PipelineExecution).executions,
                                                        attr.fields(PipelineExecution).max_attempts
                                                        ))
        executions = dict()
        for node_id, executions_list in self.executions.items():
            if len(executions_list) > 0 and isinstance(executions_list[0], entities.Execution):
                executions[node_id] = [e.to_json() for e in executions_list]
            else:
                executions[node_id] = executions_list

        _json['pipelineId'] = self.pipeline_id
        _json['maxAttempts'] = self.max_attempts
        _json['createdAt'] = self.created_at
        _json['updatedAt'] = self.updated_at
        _json['nodes'] = [node.to_json() for node in self.nodes]
        _json['executions'] = executions
        return _json

    #########
    # Props #
    #########
    @property
    def pipeline(self):
        if self._pipeline is None:
            self._pipeline = self.pipelines.get(pipeline_id=self.pipeline_id, fetch=None)
        assert isinstance(self._pipeline, entities.Pipeline)
        return self._pipeline

    @property
    def project(self):
        if self._project is None:
            self._project = self.pipeline.project
        assert isinstance(self._pipeline.project, entities.Project)
        return self._pipeline.project

    ################
    # repositories #
    ################
    @_repositories.default
    def set_repositories(self):
        reps = namedtuple('repositories',
                          field_names=['projects', 'pipelines', 'pipeline_executions'])

        r = reps(
            projects=repositories.Projects(client_api=self._client_api),
            pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
            pipeline_executions=repositories.PipelineExecutions(client_api=self._client_api,
                                                                project=self._project,
                                                                pipeline=self._pipeline)
        )
        return r

    @property
    def projects(self):
        assert isinstance(self._repositories.projects, repositories.Projects)
        return self._repositories.projects

    @property
    def pipelines(self):
        assert isinstance(self._repositories.pipelines, repositories.Pipelines)
        return self._repositories.pipelines

    @property
    def pipeline_executions(self):
        assert isinstance(self._repositories.pipeline_executions, repositories.PipelineExecutions)
        return self._repositories.pipeline_executions

    def rerun(self,
              method: str = None,
              start_nodes_ids: list = None,
              wait: bool = True
              ) -> bool:
        """
        Get Pipeline Execution object

        **prerequisites**: You must be an *owner* or *developer* to use this method.

        :param str method: method to run
        :param list start_nodes_ids: list of start nodes ids
        :param bool wait: wait until rerun finish
        :return: True if success
        :rtype: bool

        **Example**:

        .. code-block:: python

            pipeline_executions.rerun(method=dl.CycleRerunMethod.START_FROM_BEGINNING,)
        """
        filters = entities.Filters(field='id', values=[self.id], operator=entities.FiltersOperations.IN,
                                   resource=entities.FiltersResource.PIPELINE_EXECUTION)
        return self.pipeline_executions.rerun(
            method=method,
            start_nodes_ids=start_nodes_ids,
            filters=filters,
            wait=wait
        )

    def wait(self):
        """
        Wait for pipeline execution

        :return: Pipeline execution object
        """
        return self.pipeline_executions.wait(pipeline_execution_id=self.id)

    def in_progress(self):
        return self.status not in [PipelineExecutionStatus.FAILED,
                                   PipelineExecutionStatus.SUCCESS,
                                   PipelineExecutionStatus.TERMINATED]
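For context, here is a minimal usage sketch of the PipelineExecution entity shown in the hunk above. The rerun, wait, and in_progress calls and dl.CycleRerunMethod come from this module; the entry points dl.pipelines.get and pipeline.pipeline_executions.list, and the pipeline name 'my-pipeline', are assumptions based on the SDK's usual repository layout and are not part of this diff.

# Minimal sketch, assuming the standard dtlpy entry points; 'my-pipeline' is a placeholder.
import dtlpy as dl

dl.login()  # skip if a token is already cached
pipeline = dl.pipelines.get(pipeline_name='my-pipeline')  # assumed repository accessor

# take the most recent cycle (pipeline execution) of this pipeline
cycle = pipeline.pipeline_executions.list().items[0]

if not cycle.in_progress():
    # rerun the whole cycle from its first node and block until it finishes
    success = cycle.rerun(method=dl.CycleRerunMethod.START_FROM_BEGINNING, wait=True)
    print('rerun finished successfully:', success)
else:
    # block until the running cycle reaches a terminal status
    cycle = cycle.wait()
    print('cycle ended with status:', cycle.status)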