dtlpy 1.115.44__py3-none-any.whl → 1.116.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1504 -1504
  167. dtlpy/repositories/downloader.py +976 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
@@ -1,624 +1,624 @@
- from collections import namedtuple
- import logging
- import traceback
- from enum import Enum
- from typing import List
- import attr
- from .node import PipelineNode, PipelineConnection, TaskNode, CodeNode, FunctionNode, DatasetNode
- from .. import repositories, entities
- from ..services.api_client import ApiClient
- from .package_function import PackageInputType
- import copy
-
- logger = logging.getLogger(name='dtlpy')
-
-
- class PipelineResumeOption(str, Enum):
-     TERMINATE_EXISTING_CYCLES = 'terminateExistingCycles',
-     RESUME_EXISTING_CYCLES = 'resumeExistingCycles'
-
-
- class CompositionStatus(str, Enum):
-     CREATED = "Created",
-     INITIALIZING = "Initializing",
-     INSTALLED = "Installed",
-     ACTIVATED = "Activated",
-     DEACTIVATED = "Deactivated",
-     UNINSTALLED = "Uninstalled",
-     TERMINATING = "Terminating",
-     TERMINATED = "Terminated",
-     UPDATING = "Updating",
-     FAILURE = "Failure"
-
-
- class PipelineSettings:
-
-     def __init__(
-             self,
-             default_resume_option: PipelineResumeOption = None,
-             keep_triggers_active: bool = None,
-             active_trigger_ask_again: bool = None,
-             last_update: dict = None
-     ):
-         self.default_resume_option = default_resume_option
-         self.keep_triggers_active = keep_triggers_active
-         self.active_trigger_ask_again = active_trigger_ask_again
-         self.last_update = last_update
-
-     @classmethod
-     def from_json(cls, _json: dict = None):
-         if _json is None:
-             _json = dict()
-         return cls(
-             default_resume_option=_json.get('defaultResumeOption', None),
-             keep_triggers_active=_json.get('keepTriggersActive', None),
-             active_trigger_ask_again=_json.get('activeTriggerAskAgain', None),
-             last_update=_json.get('lastUpdate', None)
-         )
-
-     def to_json(self):
-         _json = dict()
-
-         if self.default_resume_option is not None:
-             _json['defaultResumeOption'] = self.default_resume_option
-
-         if self.keep_triggers_active is not None:
-             _json['keepTriggersActive'] = self.keep_triggers_active
-
-         if self.active_trigger_ask_again is not None:
-             _json['activeTriggerAskAgain'] = self.active_trigger_ask_again
-
-         if self.last_update is not None:
-             _json['lastUpdate'] = self.last_update
-
-         return _json
-
-
- class Variable(entities.DlEntity):
-     """
-     Pipeline Variables
-     """
-     id: str = entities.DlProperty(location=['id'], _type=str)
-     created_at: str = entities.DlProperty(location=['createdAt'], _type=str)
-     updated_at: str = entities.DlProperty(location=['updatedAt'], _type=str)
-     reference: str = entities.DlProperty(location=['reference'], _type=str)
-     creator: str = entities.DlProperty(location=['creator'], _type=str)
-     variable_type: PackageInputType = entities.DlProperty(location=['type'], _type=PackageInputType)
-     name: str = entities.DlProperty(location=['name'], _type=str)
-     value = entities.DlProperty(location=['value'])
-
-     @classmethod
-     def from_json(cls, _json):
-         """
-         Turn platform representation of a variable into a pipeline variable entity
-
-         :param dict _json: platform representation of a pipeline variable
-         :return: pipeline variable entity
-         :rtype: dtlpy.entities.pipeline.Variable
-         """
-
-         inst = cls(_dict=_json)
-         return inst
-
-     def to_json(self):
-         """
-         :return: platform json of the pipeline variable
-         :rtype: dict
-         """
-         _json = self._dict.copy()
-         return _json
-
-
- class PipelineAverages:
-     def __init__(
-             self,
-             avg_time_per_execution: float,
-             avg_execution_per_day: float
-     ):
-         self.avg_time_per_execution = avg_time_per_execution
-         self.avg_execution_per_day = avg_execution_per_day
-
-     @classmethod
-     def from_json(cls, _json: dict = None):
-         if _json is None:
-             _json = dict()
-         return cls(
-             avg_time_per_execution=_json.get('avgTimePerExecution', 'NA'),
-             avg_execution_per_day=_json.get('avgExecutionsPerDay', 'NA')
-         )
-
-
- class NodeAverages:
-     def __init__(
-             self,
-             node_id: str,
-             averages: PipelineAverages
-     ):
-         self.node_id = node_id
-         self.averages = averages
-
-     @classmethod
-     def from_json(cls, _json: dict):
-         return cls(
-             node_id=_json.get('nodeId', None),
-             averages=PipelineAverages.from_json(_json.get('executionStatistics'))
-         )
-
-
- class PipelineCounter:
-     def __init__(
-             self,
-             status: str,
-             count: int
-     ):
-         self.status = status
-         self.count = count
-
-
- class NodeCounters:
-     def __init__(
-             self,
-             node_id: str,
-             counters: List[PipelineCounter]
-     ):
-         self.node_id = node_id
-         self.counters = counters
-
-     @classmethod
-     def from_json(cls, _json: dict):
-         return cls(
-             node_id=_json.get('nodeId', None),
-             counters=[PipelineCounter(**c) for c in _json.get('statusCount', list())],
-         )
-
-
- class PipelineStats:
-     def __init__(
-             self,
-             pipeline_counters: List[PipelineCounter],
-             node_counters: List[NodeCounters],
-             pipeline_averages: PipelineAverages,
-             node_averages: List[NodeAverages]
-     ):
-         self.pipeline_counters = pipeline_counters
-         self.node_counters = node_counters
-         self.pipeline_averages = pipeline_averages
-         self.node_averages = node_averages
-
-     @classmethod
-     def from_json(cls, _json: dict):
-         return cls(
-             pipeline_counters=[PipelineCounter(**c) for c in _json.get('pipelineExecutionCounters', list())],
-             node_counters=[NodeCounters.from_json(_json=c) for c in _json.get('nodeExecutionsCounters', list())],
-             pipeline_averages=PipelineAverages.from_json(_json.get('pipelineExecutionStatistics', None)),
-             node_averages=[NodeAverages.from_json(_json=c) for c in _json.get('nodeExecutionStatistics', list())]
-         )
-
-
- @attr.s
- class Pipeline(entities.BaseEntity):
-     """
-     Pipeline object
-     """
-     # platform
-     id = attr.ib()
-     name = attr.ib()
-     creator = attr.ib()
-     org_id = attr.ib()
-     connections = attr.ib()
-     settings = attr.ib(type=PipelineSettings)
-     variables = attr.ib(type=List[Variable])
-
-     status = attr.ib(type=CompositionStatus)
-
-     # name change
-     created_at = attr.ib()
-     updated_at = attr.ib(repr=False)
-     start_nodes = attr.ib()
-     project_id = attr.ib()
-     composition_id = attr.ib()
-     url = attr.ib()
-     preview = attr.ib()
-     description = attr.ib()
-     revisions = attr.ib()
-
-     # sdk
-     _project = attr.ib(repr=False)
-     _client_api = attr.ib(type=ApiClient, repr=False)
-     _original_settings = attr.ib(repr=False, type=PipelineSettings)
-     _original_variables = attr.ib(repr=False, type=List[Variable])
-     _repositories = attr.ib(repr=False)
-
-     updated_by = attr.ib(default=None)
-
-     @staticmethod
-     def _protected_from_json(_json, client_api, project=None, is_fetched=True):
-         """
-         Same as from_json, but wrapped in try-except to catch any error
-
-         :param _json: platform json
-         :param client_api: ApiClient entity
-         :param dtlpy.entities.project.Project project: entity
-         :param is_fetched: is Entity fetched from Platform
-         :return:
-         """
-         try:
-             pipeline = Pipeline.from_json(
-                 _json=_json,
-                 client_api=client_api,
-                 project=project,
-                 is_fetched=is_fetched
-             )
-             status = True
-         except Exception:
-             pipeline = traceback.format_exc()
-             status = False
-         return status, pipeline
-
-     @classmethod
-     def from_json(cls, _json, client_api, project=None, is_fetched=True):
-         """
-         Turn platform representation of a pipeline into a pipeline entity
-
-         :param dict _json: platform representation of a pipeline
-         :param dl.ApiClient client_api: ApiClient entity
-         :param dtlpy.entities.project.Project project: entity
-         :param bool is_fetched: is Entity fetched from Platform
-         :return: Pipeline entity
-         :rtype: dtlpy.entities.pipeline.Pipeline
-         """
-         if project is not None:
-             if project.id != _json.get('projectId', None):
-                 logger.warning('Pipeline has been fetched from a project it does not belong to')
-                 project = None
-
-         connections = [PipelineConnection.from_json(_json=con) for con in _json.get('connections', list())]
-         json_variables = _json.get('variables', None) or list()
-         variables = list()
-         if json_variables:
-             copy_json_variables = copy.deepcopy(json_variables)
-             variables = [Variable.from_json(_json=v) for v in copy_json_variables]
-
-         settings = PipelineSettings.from_json(_json=_json.get('settings', dict()))
-         inst = cls(
-             created_at=_json.get('createdAt', None),
-             updated_at=_json.get('updatedAt', None),
-             project_id=_json.get('projectId', None),
-             org_id=_json.get('orgId', None),
-             composition_id=_json.get('compositionId', None),
-             creator=_json.get('creator', None),
-             client_api=client_api,
-             name=_json.get('name', None),
-             project=project,
-             id=_json.get('id', None),
-             connections=connections,
-             start_nodes=_json.get('startNodes', None),
-             url=_json.get('url', None),
-             preview=_json.get('preview', None),
-             description=_json.get('description', None),
-             revisions=_json.get('revisions', None),
-             settings=settings,
-             variables=variables,
-             status=_json.get('status', None),
-             original_settings=settings,
-             original_variables=json_variables,
-             updated_by=_json.get('updatedBy', None),
-         )
-         for node in _json.get('nodes', list()):
-             inst.nodes.add(node=cls.pipeline_node(node))
-         inst.is_fetched = is_fetched
-         return inst
-
-     @classmethod
-     def pipeline_node(cls, _json):
-         node_type = _json.get('type')
-         if node_type == 'task':
-             return TaskNode.from_json(_json)
-         elif node_type == 'code':
-             return CodeNode.from_json(_json)
-         elif node_type == 'function':
-             return FunctionNode.from_json(_json)
-         elif node_type == 'storage':
-             return DatasetNode.from_json(_json)
-         else:
-             return PipelineNode.from_json(_json)
-
-     def settings_changed(self) -> bool:
-         return self.settings.to_json() != self._original_settings.to_json()
-
-     def variables_changed(self) -> bool:
-         new_vars = [var.to_json() for var in self.variables]
-         old_vars = self._original_variables or list()
-         return new_vars != old_vars
-
-     def to_json(self):
-         """
-         Turn Pipeline entity into a platform representation of Pipeline
-
-         :return: platform json of pipeline
-         :rtype: dict
-         """
-         _json = attr.asdict(self,
-                             filter=attr.filters.exclude(attr.fields(Pipeline)._project,
-                                                         attr.fields(Pipeline)._repositories,
-                                                         attr.fields(Pipeline)._client_api,
-                                                         attr.fields(Pipeline).project_id,
-                                                         attr.fields(Pipeline).org_id,
-                                                         attr.fields(Pipeline).connections,
-                                                         attr.fields(Pipeline).created_at,
-                                                         attr.fields(Pipeline).updated_at,
-                                                         attr.fields(Pipeline).start_nodes,
-                                                         attr.fields(Pipeline).composition_id,
-                                                         attr.fields(Pipeline).url,
-                                                         attr.fields(Pipeline).preview,
-                                                         attr.fields(Pipeline).description,
-                                                         attr.fields(Pipeline).revisions,
-                                                         attr.fields(Pipeline).settings,
-                                                         attr.fields(Pipeline).variables,
-                                                         attr.fields(Pipeline)._original_settings,
-                                                         attr.fields(Pipeline)._original_variables,
-                                                         attr.fields(Pipeline).updated_by,
-                                                         ))
-
-         _json['projectId'] = self.project_id
-         _json['createdAt'] = self.created_at
-         _json['updatedAt'] = self.updated_at
-         _json['compositionId'] = self.composition_id
-         _json['startNodes'] = self.start_nodes
-         _json['orgId'] = self.org_id
-         _json['nodes'] = [node.to_json() for node in self.nodes]
-         _json['connections'] = [con.to_json() for con in self.connections]
-         if self.variables:
-             _json['variables'] = [v.to_json() for v in self.variables]
-         _json['url'] = self.url
-
-         settings_json = self.settings.to_json()
-         if settings_json:
-             _json['settings'] = settings_json
-
-         if self.preview is not None:
-             _json['preview'] = self.preview
-         if self.description is not None:
-             _json['description'] = self.description
-         if self.revisions is not None:
-             _json['revisions'] = self.revisions
-         if self.updated_by is not None:
-             _json['updatedBy'] = self.updated_by
-
-         return _json
-
-     #########
-     # Props #
-     #########
-
-     @property
-     def platform_url(self):
-         return self._client_api._get_resource_url("projects/{}/pipelines/{}".format(self.project_id, self.id))
-
-     @property
-     def project(self):
-         if self._project is None:
-             self._project = self.projects.get(project_id=self.project_id, fetch=None)
-         assert isinstance(self._project, entities.Project)
-         return self._project
-
-     ################
-     # repositories #
-     ################
-     @_repositories.default
-     def set_repositories(self):
-         reps = namedtuple('repositories',
-                           field_names=['projects', 'pipelines', 'pipeline_executions', 'triggers', 'nodes'])
-
-         r = reps(
-             projects=repositories.Projects(client_api=self._client_api),
-             pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
-             pipeline_executions=repositories.PipelineExecutions(
-                 client_api=self._client_api, project=self._project, pipeline=self
-             ),
-             triggers=repositories.Triggers(client_api=self._client_api, pipeline=self),
-             nodes=repositories.Nodes(client_api=self._client_api, pipeline=self)
-         )
-         return r
-
-     @property
-     def projects(self):
-         assert isinstance(self._repositories.projects, repositories.Projects)
-         return self._repositories.projects
-
-     @property
-     def triggers(self):
-         assert isinstance(self._repositories.triggers, repositories.Triggers)
-         return self._repositories.triggers
-
-     @property
-     def nodes(self):
-         assert isinstance(self._repositories.nodes, repositories.Nodes)
-         return self._repositories.nodes
-
-     @property
-     def pipelines(self):
-         assert isinstance(self._repositories.pipelines, repositories.Pipelines)
-         return self._repositories.pipelines
-
-     @property
-     def pipeline_executions(self):
-         assert isinstance(self._repositories.pipeline_executions, repositories.PipelineExecutions)
-         return self._repositories.pipeline_executions
-
-     ###########
-     # methods #
-     ###########
-     def update(self):
-         """
-         Update pipeline changes to platform
-
-         :return: pipeline entity
-         """
-         return self.pipelines.update(pipeline=self)
-
-     def delete(self):
-         """
-         Delete pipeline object
-
-         :return: True
-         """
-         return self.pipelines.delete(pipeline=self)
-
-     def open_in_web(self):
-         """
-         Open the pipeline in the web platform
-
-         :return:
-         """
-         self._client_api._open_in_web(url=self.platform_url)
-
-     def install(self, resume_option: PipelineResumeOption = None):
-         """
-         Install the pipeline
-
-         :return: Composition entity
-         """
-         return self.pipelines.install(pipeline=self, resume_option=resume_option)
-
-     def pause(self, keep_triggers_active: bool = None):
-         """
-         Pause the pipeline
-
-         :return: Composition entity
-         """
-         return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)
-
-     def execute(self, execution_input=None, node_id: str = None):
-         """
-         Execute a pipeline and return the pipeline execution
-
-         :param execution_input: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}
-         :param str node_id: node id to execute
-         :return: entities.PipelineExecution object
-         """
-         execution = self.pipeline_executions.create(
-             pipeline_id=self.id,
-             execution_input=execution_input,
-             node_id=node_id
-         )
-         return execution
-
-     def test(self, execution_input=None):
-         """
-         Execute a pipeline in test mode and return the pipeline execution as an object.
-
-         :param execution_input: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}
-         :return: entities.PipelineExecution object
-         """
-         execution = self.pipelines.test(
-             pipeline=self,
-             pipeline_id=self.id,
-             execution_input=execution_input,
-         )
-         return execution
-
-     def execute_batch(
-             self,
-             filters,
-             execution_inputs=None,
-             wait=True,
-             node_id: str = None
-     ):
-         """
-         Execute a pipeline on a batch of items and return the pipeline execution
-
-         :param execution_inputs: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}, that represent the extra inputs of the function
-         :param filters: Filters entity for filtering items before execution
-         :param bool wait: wait until the batch creation finishes
-         :param str node_id: node id to execute
-         :return: entities.PipelineExecution object
-
-         **Example**:
-
-         .. code-block:: python
-
-             command = pipeline.execute_batch(
-                 execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
-                 filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]}))
-         """
-         command = self.pipeline_executions.create_batch(
-             pipeline_id=self.id,
-             execution_inputs=execution_inputs,
-             filters=filters,
-             wait=wait,
-             node_id=node_id
-         )
-         return command
-
-     def reset(self, stop_if_running: bool = False):
-         """
-         Reset pipeline counters
-
-         :param bool stop_if_running: if the pipeline is installed, stop the pipeline and reset the counters.
-         :return: bool
-         """
-         return self.pipelines.reset(pipeline_id=self.id, stop_if_running=stop_if_running)
-
-     def stats(self):
-         """
-         Get pipeline counters
-
-         :return: PipelineStats
-         :rtype: dtlpy.entities.pipeline.PipelineStats
-         """
-         return self.pipelines.stats(pipeline_id=self.id)
-
-     def set_start_node(self, node: PipelineNode):
-         """
-         Set the start node of the pipeline
-
-         :param PipelineNode node: node to be the start node
-         """
-         connections = [connection for connection in self.connections if connection.target.node_id == node.node_id]
-         if connections:
-             raise Exception(
-                 'Connections cannot be added to the pipeline start node. To add a connection, please reposition the start sign')
-         if self.start_nodes:
-             for pipe_node in self.start_nodes:
-                 if pipe_node['type'] == 'root':
-                     pipe_node['nodeId'] = node.node_id
-         else:
-             self.start_nodes = [{"nodeId": node.node_id,
-                                  "type": "root"}]
-
-     def update_variables_values(self, **kwargs):
-         """
-         Update pipeline variables values for the given keyword arguments.
-
-         **Example**:
-
-         .. code-block:: python
-
-             pipeline.update_variables_values(
-                 dataset=dataset.id,
-                 model=model.id,
-                 threshold=0.9
-             )
-             pipeline.update()
-         """
-         keys = kwargs.keys()
-         for variable in self.variables:
-             if variable.name in keys:
-                 variable.value = kwargs[variable.name]
-
-     def validate(self):
-         """
-         Validate the pipeline configuration.
-
-         **prerequisites**: You must be an *owner* or *developer* to use this method.
-
-         :return: Validation result
-         :rtype: dict
-
-         **Example**:
-
-         .. code-block:: python
-
-             validation_result = pipeline.validate()
-         """
-         return self.pipelines.validate(pipeline_json=self.to_json())
+ from collections import namedtuple
+ import logging
+ import traceback
+ from enum import Enum
+ from typing import List
+ import attr
+ from .node import PipelineNode, PipelineConnection, TaskNode, CodeNode, FunctionNode, DatasetNode
+ from .. import repositories, entities
+ from ..services.api_client import ApiClient
+ from .package_function import PackageInputType
+ import copy
+
+ logger = logging.getLogger(name='dtlpy')
+
+
+ class PipelineResumeOption(str, Enum):
+     TERMINATE_EXISTING_CYCLES = 'terminateExistingCycles',
+     RESUME_EXISTING_CYCLES = 'resumeExistingCycles'
+
+
+ class CompositionStatus(str, Enum):
+     CREATED = "Created",
+     INITIALIZING = "Initializing",
+     INSTALLED = "Installed",
+     ACTIVATED = "Activated",
+     DEACTIVATED = "Deactivated",
+     UNINSTALLED = "Uninstalled",
+     TERMINATING = "Terminating",
+     TERMINATED = "Terminated",
+     UPDATING = "Updating",
+     FAILURE = "Failure"
+
+
+ class PipelineSettings:
+
+     def __init__(
+             self,
+             default_resume_option: PipelineResumeOption = None,
+             keep_triggers_active: bool = None,
+             active_trigger_ask_again: bool = None,
+             last_update: dict = None
+     ):
+         self.default_resume_option = default_resume_option
+         self.keep_triggers_active = keep_triggers_active
+         self.active_trigger_ask_again = active_trigger_ask_again
+         self.last_update = last_update
+
+     @classmethod
+     def from_json(cls, _json: dict = None):
+         if _json is None:
+             _json = dict()
+         return cls(
+             default_resume_option=_json.get('defaultResumeOption', None),
+             keep_triggers_active=_json.get('keepTriggersActive', None),
+             active_trigger_ask_again=_json.get('activeTriggerAskAgain', None),
+             last_update=_json.get('lastUpdate', None)
+         )
+
+     def to_json(self):
+         _json = dict()
+
+         if self.default_resume_option is not None:
+             _json['defaultResumeOption'] = self.default_resume_option
+
+         if self.keep_triggers_active is not None:
+             _json['keepTriggersActive'] = self.keep_triggers_active
+
+         if self.active_trigger_ask_again is not None:
+             _json['activeTriggerAskAgain'] = self.active_trigger_ask_again
+
+         if self.last_update is not None:
+             _json['lastUpdate'] = self.last_update
+
+         return _json
+
+
+ class Variable(entities.DlEntity):
+     """
+     Pipeline Variables
+     """
+     id: str = entities.DlProperty(location=['id'], _type=str)
+     created_at: str = entities.DlProperty(location=['createdAt'], _type=str)
+     updated_at: str = entities.DlProperty(location=['updatedAt'], _type=str)
+     reference: str = entities.DlProperty(location=['reference'], _type=str)
+     creator: str = entities.DlProperty(location=['creator'], _type=str)
+     variable_type: PackageInputType = entities.DlProperty(location=['type'], _type=PackageInputType)
+     name: str = entities.DlProperty(location=['name'], _type=str)
+     value = entities.DlProperty(location=['value'])
+
+     @classmethod
+     def from_json(cls, _json):
+         """
+         Turn platform representation of a variable into a pipeline variable entity
+
+         :param dict _json: platform representation of a pipeline variable
+         :return: pipeline variable entity
+         :rtype: dtlpy.entities.pipeline.Variable
+         """
+
+         inst = cls(_dict=_json)
+         return inst
+
+     def to_json(self):
+         """
+         :return: platform json of the pipeline variable
+         :rtype: dict
+         """
+         _json = self._dict.copy()
+         return _json
+
+
+ class PipelineAverages:
+     def __init__(
+             self,
+             avg_time_per_execution: float,
+             avg_execution_per_day: float
+     ):
+         self.avg_time_per_execution = avg_time_per_execution
+         self.avg_execution_per_day = avg_execution_per_day
+
+     @classmethod
+     def from_json(cls, _json: dict = None):
+         if _json is None:
+             _json = dict()
+         return cls(
+             avg_time_per_execution=_json.get('avgTimePerExecution', 'NA'),
+             avg_execution_per_day=_json.get('avgExecutionsPerDay', 'NA')
+         )
+
+
+ class NodeAverages:
+     def __init__(
+             self,
+             node_id: str,
+             averages: PipelineAverages
+     ):
+         self.node_id = node_id
+         self.averages = averages
+
+     @classmethod
+     def from_json(cls, _json: dict):
+         return cls(
+             node_id=_json.get('nodeId', None),
+             averages=PipelineAverages.from_json(_json.get('executionStatistics'))
+         )
+
+
+ class PipelineCounter:
+     def __init__(
+             self,
+             status: str,
+             count: int
+     ):
+         self.status = status
+         self.count = count
+
+
+ class NodeCounters:
+     def __init__(
+             self,
+             node_id: str,
+             counters: List[PipelineCounter]
+     ):
+         self.node_id = node_id
+         self.counters = counters
+
+     @classmethod
+     def from_json(cls, _json: dict):
+         return cls(
+             node_id=_json.get('nodeId', None),
+             counters=[PipelineCounter(**c) for c in _json.get('statusCount', list())],
+         )
+
+
+ class PipelineStats:
+     def __init__(
+             self,
+             pipeline_counters: List[PipelineCounter],
+             node_counters: List[NodeCounters],
+             pipeline_averages: PipelineAverages,
+             node_averages: List[NodeAverages]
+     ):
+         self.pipeline_counters = pipeline_counters
+         self.node_counters = node_counters
+         self.pipeline_averages = pipeline_averages
+         self.node_averages = node_averages
+
+     @classmethod
+     def from_json(cls, _json: dict):
+         return cls(
+             pipeline_counters=[PipelineCounter(**c) for c in _json.get('pipelineExecutionCounters', list())],
+             node_counters=[NodeCounters.from_json(_json=c) for c in _json.get('nodeExecutionsCounters', list())],
+             pipeline_averages=PipelineAverages.from_json(_json.get('pipelineExecutionStatistics', None)),
+             node_averages=[NodeAverages.from_json(_json=c) for c in _json.get('nodeExecutionStatistics', list())]
+         )
+
+
+ @attr.s
+ class Pipeline(entities.BaseEntity):
+     """
+     Pipeline object
+     """
+     # platform
+     id = attr.ib()
+     name = attr.ib()
+     creator = attr.ib()
+     org_id = attr.ib()
+     connections = attr.ib()
+     settings = attr.ib(type=PipelineSettings)
+     variables = attr.ib(type=List[Variable])
+
+     status = attr.ib(type=CompositionStatus)
+
+     # name change
+     created_at = attr.ib()
+     updated_at = attr.ib(repr=False)
+     start_nodes = attr.ib()
+     project_id = attr.ib()
+     composition_id = attr.ib()
+     url = attr.ib()
+     preview = attr.ib()
+     description = attr.ib()
+     revisions = attr.ib()
+
+     # sdk
+     _project = attr.ib(repr=False)
+     _client_api = attr.ib(type=ApiClient, repr=False)
+     _original_settings = attr.ib(repr=False, type=PipelineSettings)
+     _original_variables = attr.ib(repr=False, type=List[Variable])
+     _repositories = attr.ib(repr=False)
+
+     updated_by = attr.ib(default=None)
+
+     @staticmethod
+     def _protected_from_json(_json, client_api, project=None, is_fetched=True):
+         """
+         Same as from_json, but wrapped in try-except to catch any error
+
+         :param _json: platform json
+         :param client_api: ApiClient entity
+         :param dtlpy.entities.project.Project project: entity
+         :param is_fetched: is Entity fetched from Platform
+         :return:
+         """
+         try:
+             pipeline = Pipeline.from_json(
+                 _json=_json,
+                 client_api=client_api,
+                 project=project,
+                 is_fetched=is_fetched
+             )
+             status = True
+         except Exception:
+             pipeline = traceback.format_exc()
+             status = False
+         return status, pipeline
+
+     @classmethod
+     def from_json(cls, _json, client_api, project=None, is_fetched=True):
+         """
+         Turn platform representation of a pipeline into a pipeline entity
+
+         :param dict _json: platform representation of a pipeline
+         :param dl.ApiClient client_api: ApiClient entity
+         :param dtlpy.entities.project.Project project: entity
+         :param bool is_fetched: is Entity fetched from Platform
+         :return: Pipeline entity
+         :rtype: dtlpy.entities.pipeline.Pipeline
+         """
+         if project is not None:
+             if project.id != _json.get('projectId', None):
+                 logger.warning('Pipeline has been fetched from a project it does not belong to')
+                 project = None
+
+         connections = [PipelineConnection.from_json(_json=con) for con in _json.get('connections', list())]
+         json_variables = _json.get('variables', None) or list()
+         variables = list()
+         if json_variables:
+             copy_json_variables = copy.deepcopy(json_variables)
+             variables = [Variable.from_json(_json=v) for v in copy_json_variables]
+
+         settings = PipelineSettings.from_json(_json=_json.get('settings', dict()))
+         inst = cls(
+             created_at=_json.get('createdAt', None),
+             updated_at=_json.get('updatedAt', None),
+             project_id=_json.get('projectId', None),
+             org_id=_json.get('orgId', None),
+             composition_id=_json.get('compositionId', None),
+             creator=_json.get('creator', None),
+             client_api=client_api,
+             name=_json.get('name', None),
+             project=project,
+             id=_json.get('id', None),
+             connections=connections,
+             start_nodes=_json.get('startNodes', None),
+             url=_json.get('url', None),
+             preview=_json.get('preview', None),
+             description=_json.get('description', None),
+             revisions=_json.get('revisions', None),
+             settings=settings,
+             variables=variables,
+             status=_json.get('status', None),
+             original_settings=settings,
+             original_variables=json_variables,
+             updated_by=_json.get('updatedBy', None),
+         )
+         for node in _json.get('nodes', list()):
+             inst.nodes.add(node=cls.pipeline_node(node))
+         inst.is_fetched = is_fetched
+         return inst
+
+     @classmethod
+     def pipeline_node(cls, _json):
+         node_type = _json.get('type')
+         if node_type == 'task':
+             return TaskNode.from_json(_json)
+         elif node_type == 'code':
+             return CodeNode.from_json(_json)
+         elif node_type == 'function':
+             return FunctionNode.from_json(_json)
+         elif node_type == 'storage':
+             return DatasetNode.from_json(_json)
+         else:
+             return PipelineNode.from_json(_json)
+
+     def settings_changed(self) -> bool:
+         return self.settings.to_json() != self._original_settings.to_json()
+
+     def variables_changed(self) -> bool:
+         new_vars = [var.to_json() for var in self.variables]
+         old_vars = self._original_variables or list()
+         return new_vars != old_vars
+
+     def to_json(self):
+         """
+         Turn Pipeline entity into a platform representation of Pipeline
+
+         :return: platform json of pipeline
+         :rtype: dict
+         """
+         _json = attr.asdict(self,
+                             filter=attr.filters.exclude(attr.fields(Pipeline)._project,
+                                                         attr.fields(Pipeline)._repositories,
+                                                         attr.fields(Pipeline)._client_api,
+                                                         attr.fields(Pipeline).project_id,
+                                                         attr.fields(Pipeline).org_id,
+                                                         attr.fields(Pipeline).connections,
+                                                         attr.fields(Pipeline).created_at,
+                                                         attr.fields(Pipeline).updated_at,
+                                                         attr.fields(Pipeline).start_nodes,
+                                                         attr.fields(Pipeline).composition_id,
+                                                         attr.fields(Pipeline).url,
+                                                         attr.fields(Pipeline).preview,
+                                                         attr.fields(Pipeline).description,
+                                                         attr.fields(Pipeline).revisions,
+                                                         attr.fields(Pipeline).settings,
+                                                         attr.fields(Pipeline).variables,
+                                                         attr.fields(Pipeline)._original_settings,
+                                                         attr.fields(Pipeline)._original_variables,
+                                                         attr.fields(Pipeline).updated_by,
+                                                         ))
+
+         _json['projectId'] = self.project_id
+         _json['createdAt'] = self.created_at
+         _json['updatedAt'] = self.updated_at
+         _json['compositionId'] = self.composition_id
+         _json['startNodes'] = self.start_nodes
+         _json['orgId'] = self.org_id
+         _json['nodes'] = [node.to_json() for node in self.nodes]
+         _json['connections'] = [con.to_json() for con in self.connections]
+         if self.variables:
+             _json['variables'] = [v.to_json() for v in self.variables]
+         _json['url'] = self.url
+
+         settings_json = self.settings.to_json()
+         if settings_json:
+             _json['settings'] = settings_json
+
+         if self.preview is not None:
+             _json['preview'] = self.preview
+         if self.description is not None:
+             _json['description'] = self.description
+         if self.revisions is not None:
+             _json['revisions'] = self.revisions
+         if self.updated_by is not None:
+             _json['updatedBy'] = self.updated_by
+
+         return _json
+
+     #########
+     # Props #
+     #########
+
+     @property
+     def platform_url(self):
+         return self._client_api._get_resource_url("projects/{}/pipelines/{}".format(self.project_id, self.id))
+
+     @property
+     def project(self):
+         if self._project is None:
+             self._project = self.projects.get(project_id=self.project_id, fetch=None)
+         assert isinstance(self._project, entities.Project)
+         return self._project
+
+     ################
+     # repositories #
+     ################
+     @_repositories.default
+     def set_repositories(self):
+         reps = namedtuple('repositories',
+                           field_names=['projects', 'pipelines', 'pipeline_executions', 'triggers', 'nodes'])
+
+         r = reps(
+             projects=repositories.Projects(client_api=self._client_api),
+             pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
+             pipeline_executions=repositories.PipelineExecutions(
+                 client_api=self._client_api, project=self._project, pipeline=self
+             ),
+             triggers=repositories.Triggers(client_api=self._client_api, pipeline=self),
+             nodes=repositories.Nodes(client_api=self._client_api, pipeline=self)
+         )
+         return r
+
+     @property
+     def projects(self):
+         assert isinstance(self._repositories.projects, repositories.Projects)
+         return self._repositories.projects
+
+     @property
+     def triggers(self):
+         assert isinstance(self._repositories.triggers, repositories.Triggers)
+         return self._repositories.triggers
+
+     @property
+     def nodes(self):
+         assert isinstance(self._repositories.nodes, repositories.Nodes)
+         return self._repositories.nodes
+
+     @property
+     def pipelines(self):
+         assert isinstance(self._repositories.pipelines, repositories.Pipelines)
+         return self._repositories.pipelines
+
+     @property
+     def pipeline_executions(self):
+         assert isinstance(self._repositories.pipeline_executions, repositories.PipelineExecutions)
+         return self._repositories.pipeline_executions
+
+     ###########
+     # methods #
+     ###########
+     def update(self):
+         """
+         Update pipeline changes to platform
+
+         :return: pipeline entity
+         """
+         return self.pipelines.update(pipeline=self)
+
+     def delete(self):
+         """
+         Delete pipeline object
+
+         :return: True
+         """
+         return self.pipelines.delete(pipeline=self)
+
+     def open_in_web(self):
+         """
+         Open the pipeline in the web platform
+
+         :return:
+         """
+         self._client_api._open_in_web(url=self.platform_url)
+
+     def install(self, resume_option: PipelineResumeOption = None):
+         """
+         Install the pipeline
+
+         :return: Composition entity
+         """
+         return self.pipelines.install(pipeline=self, resume_option=resume_option)
+
+     def pause(self, keep_triggers_active: bool = None):
+         """
+         Pause the pipeline
+
+         :return: Composition entity
+         """
+         return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)
+
+     def execute(self, execution_input=None, node_id: str = None):
+         """
+         Execute a pipeline and return the pipeline execution
+
+         :param execution_input: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}
+         :param str node_id: node id to execute
+         :return: entities.PipelineExecution object
+         """
+         execution = self.pipeline_executions.create(
+             pipeline_id=self.id,
+             execution_input=execution_input,
+             node_id=node_id
+         )
+         return execution
+
+     def test(self, execution_input=None):
+         """
+         Execute a pipeline in test mode and return the pipeline execution as an object.
+
+         :param execution_input: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}
+         :return: entities.PipelineExecution object
+         """
+         execution = self.pipelines.test(
+             pipeline=self,
+             pipeline_id=self.id,
+             execution_input=execution_input,
+         )
+         return execution
+
+     def execute_batch(
+             self,
+             filters,
+             execution_inputs=None,
+             wait=True,
+             node_id: str = None
+     ):
+         """
+         Execute a pipeline on a batch of items and return the pipeline execution
+
+         :param execution_inputs: list of dl.FunctionIO, or dict of pipeline input - example: {'item': 'item_id'}, that represent the extra inputs of the function
+         :param filters: Filters entity for filtering items before execution
+         :param bool wait: wait until the batch creation finishes
+         :param str node_id: node id to execute
+         :return: entities.PipelineExecution object
+
+         **Example**:
+
+         .. code-block:: python
+
+             command = pipeline.execute_batch(
+                 execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
+                 filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]}))
+         """
+         command = self.pipeline_executions.create_batch(
+             pipeline_id=self.id,
+             execution_inputs=execution_inputs,
+             filters=filters,
+             wait=wait,
+             node_id=node_id
+         )
+         return command
+
+     def reset(self, stop_if_running: bool = False):
+         """
+         Reset pipeline counters
+
+         :param bool stop_if_running: if the pipeline is installed, stop the pipeline and reset the counters.
+         :return: bool
+         """
+         return self.pipelines.reset(pipeline_id=self.id, stop_if_running=stop_if_running)
+
+     def stats(self):
+         """
+         Get pipeline counters
+
+         :return: PipelineStats
+         :rtype: dtlpy.entities.pipeline.PipelineStats
+         """
+         return self.pipelines.stats(pipeline_id=self.id)
+
+     def set_start_node(self, node: PipelineNode):
+         """
+         Set the start node of the pipeline
+
+         :param PipelineNode node: node to be the start node
+         """
+         connections = [connection for connection in self.connections if connection.target.node_id == node.node_id]
+         if connections:
+             raise Exception(
+                 'Connections cannot be added to the pipeline start node. To add a connection, please reposition the start sign')
+         if self.start_nodes:
+             for pipe_node in self.start_nodes:
+                 if pipe_node['type'] == 'root':
+                     pipe_node['nodeId'] = node.node_id
+         else:
+             self.start_nodes = [{"nodeId": node.node_id,
+                                  "type": "root"}]
+
+     def update_variables_values(self, **kwargs):
+         """
+         Update pipeline variables values for the given keyword arguments.
+
+         **Example**:
+
+         .. code-block:: python
+
+             pipeline.update_variables_values(
+                 dataset=dataset.id,
+                 model=model.id,
+                 threshold=0.9
+             )
+             pipeline.update()
+         """
+         keys = kwargs.keys()
+         for variable in self.variables:
+             if variable.name in keys:
+                 variable.value = kwargs[variable.name]
+
+     def validate(self):
+         """
+         Validate the pipeline configuration.
+
+         **prerequisites**: You must be an *owner* or *developer* to use this method.
+
+         :return: Validation result
+         :rtype: dict
+
+         **Example**:
+
+         .. code-block:: python
+
+             validation_result = pipeline.validate()
+         """
+         return self.pipelines.validate(pipeline_json=self.to_json())