dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243) hide show
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
@@ -1,593 +1,593 @@
1
- from collections import namedtuple
2
- import logging
3
- import traceback
4
- from enum import Enum
5
- from typing import List
6
- import attr
7
- from .node import PipelineNode, PipelineConnection, TaskNode, CodeNode, FunctionNode, DatasetNode
8
- from .. import repositories, entities
9
- from ..services.api_client import ApiClient
10
- from .package_function import PackageInputType
11
- import copy
12
-
13
- logger = logging.getLogger(name='dtlpy')
14
-
15
-
16
- class PipelineResumeOption(str, Enum):
17
- TERMINATE_EXISTING_CYCLES = 'terminateExistingCycles',
18
- RESUME_EXISTING_CYCLES = 'resumeExistingCycles'
19
-
20
-
21
- class CompositionStatus(str, Enum):
22
- CREATED = "Created",
23
- INITIALIZING = "Initializing",
24
- INSTALLED = "Installed",
25
- ACTIVATED = "Activated",
26
- DEACTIVATED = "Deactivated",
27
- UNINSTALLED = "Uninstalled",
28
- TERMINATING = "Terminating",
29
- TERMINATED = "Terminated",
30
- UPDATING = "Updating",
31
- FAILURE = "Failure"
32
-
33
-
34
- class PipelineSettings:
35
-
36
- def __init__(
37
- self,
38
- default_resume_option: PipelineResumeOption = None,
39
- keep_triggers_active: bool = None,
40
- active_trigger_ask_again: bool = None,
41
- last_update: dict = None
42
- ):
43
- self.default_resume_option = default_resume_option
44
- self.keep_triggers_active = keep_triggers_active
45
- self.active_trigger_ask_again = active_trigger_ask_again
46
- self.last_update = last_update
47
-
48
- @classmethod
49
- def from_json(cls, _json: dict = None):
50
- if _json is None:
51
- _json = dict()
52
- return cls(
53
- default_resume_option=_json.get('defaultResumeOption', None),
54
- keep_triggers_active=_json.get('keepTriggersActive', None),
55
- active_trigger_ask_again=_json.get('activeTriggerAskAgain', None),
56
- last_update=_json.get('lastUpdate', None)
57
- )
58
-
59
- def to_json(self):
60
- _json = dict()
61
-
62
- if self.default_resume_option is not None:
63
- _json['defaultResumeOption'] = self.default_resume_option
64
-
65
- if self.default_resume_option is not None:
66
- _json['keepTriggersActive'] = self.default_resume_option
67
-
68
- if self.default_resume_option is not None:
69
- _json['activeTriggerAskAgain'] = self.default_resume_option
70
-
71
- if self.default_resume_option is not None:
72
- _json['lastUpdate'] = self.default_resume_option
73
-
74
- return _json
75
-
76
-
77
- class Variable(entities.DlEntity):
78
- """
79
- Pipeline Variables
80
- """
81
- id: str = entities.DlProperty(location=['id'], _type=str)
82
- created_at: str = entities.DlProperty(location=['createdAt'], _type=str)
83
- updated_at: str = entities.DlProperty(location=['updatedAt'], _type=str)
84
- reference: str = entities.DlProperty(location=['reference'], _type=str)
85
- creator: str = entities.DlProperty(location=['creator'], _type=str)
86
- variable_type: PackageInputType = entities.DlProperty(location=['type'], _type=PackageInputType)
87
- name: str = entities.DlProperty(location=['name'], _type=str)
88
- value = entities.DlProperty(location=['value'])
89
-
90
- @classmethod
91
- def from_json(cls, _json):
92
- """
93
- Turn platform representation of variable into a pipeline variable entity
94
-
95
- :param dict _json: platform representation of pipeline variable
96
- :return: pipeline variable entity
97
- :rtype: dtlpy.entities.pipeline.PipelineVariables
98
- """
99
-
100
- inst = cls(_dict=_json)
101
- return inst
102
-
103
- def to_json(self):
104
- """
105
- :return: variable of pipeline
106
- :rtype: dict
107
- """
108
- _json = self._dict.copy()
109
- return _json
110
-
111
-
112
- class PipelineAverages:
113
- def __init__(
114
- self,
115
- avg_time_per_execution: float,
116
- avg_execution_per_day: float
117
- ):
118
- self.avg_time_per_execution = avg_time_per_execution
119
- self.avg_execution_per_day = avg_execution_per_day
120
-
121
- @classmethod
122
- def from_json(cls, _json: dict = None):
123
- if _json is None:
124
- _json = dict()
125
- return cls(
126
- avg_time_per_execution=_json.get('avgTimePerExecution', 'NA'),
127
- avg_execution_per_day=_json.get('avgExecutionsPerDay', 'NA')
128
- )
129
-
130
-
131
- class NodeAverages:
132
- def __init__(
133
- self,
134
- node_id: str,
135
- averages: PipelineAverages
136
- ):
137
- self.node_id = node_id
138
- self.averages = averages
139
-
140
- @classmethod
141
- def from_json(cls, _json: dict):
142
- return cls(
143
- node_id=_json.get('nodeId', None),
144
- averages=PipelineAverages.from_json(_json.get('executionStatistics'))
145
- )
146
-
147
-
148
- class PipelineCounter:
149
- def __init__(
150
- self,
151
- status: str,
152
- count: int
153
- ):
154
- self.status = status
155
- self.count = count
156
-
157
-
158
- class NodeCounters:
159
- def __init__(
160
- self,
161
- node_id: str,
162
- counters: List[PipelineCounter]
163
- ):
164
- self.node_id = node_id
165
- self.counters = counters
166
-
167
- @classmethod
168
- def from_json(cls, _json: dict):
169
- return cls(
170
- node_id=_json.get('nodeId', None),
171
- counters=[PipelineCounter(**c) for c in _json.get('statusCount', list())],
172
- )
173
-
174
-
175
- class PipelineStats:
176
- def __init__(
177
- self,
178
- pipeline_counters: List[PipelineCounter],
179
- node_counters: List[NodeCounters],
180
- pipeline_averages: PipelineAverages,
181
- node_averages: List[NodeAverages]
182
- ):
183
- self.pipeline_counters = pipeline_counters
184
- self.node_counters = node_counters
185
- self.pipeline_averages = pipeline_averages
186
- self.node_averages = node_averages
187
-
188
- @classmethod
189
- def from_json(cls, _json: dict):
190
- return cls(
191
- pipeline_counters=[PipelineCounter(**c) for c in _json.get('pipelineExecutionCounters', list())],
192
- node_counters=[NodeCounters.from_json(_json=c) for c in _json.get('nodeExecutionsCounters', list())],
193
- pipeline_averages=PipelineAverages.from_json(_json.get('pipelineExecutionStatistics', None)),
194
- node_averages=[NodeAverages.from_json(_json=c) for c in _json.get('nodeExecutionStatistics', list())]
195
- )
196
-
197
-
198
- @attr.s
199
- class Pipeline(entities.BaseEntity):
200
- """
201
- Pipeline object
202
- """
203
- # platform
204
- id = attr.ib()
205
- name = attr.ib()
206
- creator = attr.ib()
207
- org_id = attr.ib()
208
- connections = attr.ib()
209
- settings = attr.ib(type=PipelineSettings)
210
- variables = attr.ib(type=List[Variable])
211
-
212
- status = attr.ib(type=CompositionStatus)
213
-
214
- # name change
215
- created_at = attr.ib()
216
- updated_at = attr.ib(repr=False)
217
- start_nodes = attr.ib()
218
- project_id = attr.ib()
219
- composition_id = attr.ib()
220
- url = attr.ib()
221
- preview = attr.ib()
222
- description = attr.ib()
223
- revisions = attr.ib()
224
-
225
- # sdk
226
- _project = attr.ib(repr=False)
227
- _client_api = attr.ib(type=ApiClient, repr=False)
228
- _original_settings = attr.ib(repr=False, type=PipelineSettings)
229
- _original_variables = attr.ib(repr=False, type=List[Variable])
230
- _repositories = attr.ib(repr=False)
231
-
232
- updated_by = attr.ib(default=None)
233
-
234
- @staticmethod
235
- def _protected_from_json(_json, client_api, project=None, is_fetched=True):
236
- """
237
- Same as from_json but with try-except to catch if error
238
- :param _json: platform json
239
- :param client_api: ApiClient entity
240
- :param dtlpy.entities.project.Project project: entity
241
- :param is_fetched: is Entity fetched from Platform
242
- :return:
243
- """
244
- try:
245
- pipeline = Pipeline.from_json(
246
- _json=_json,
247
- client_api=client_api,
248
- project=project,
249
- is_fetched=is_fetched
250
- )
251
- status = True
252
- except Exception:
253
- pipeline = traceback.format_exc()
254
- status = False
255
- return status, pipeline
256
-
257
- @classmethod
258
- def from_json(cls, _json, client_api, project=None, is_fetched=True):
259
- """
260
- Turn platform representation of pipeline into a pipeline entity
261
-
262
- :param dict _json: platform representation of package
263
- :param dl.ApiClient client_api: ApiClient entity
264
- :param dtlpy.entities.project.Project project: entity
265
- :param bool is_fetched: is Entity fetched from Platform
266
- :return: Pipeline entity
267
- :rtype: dtlpy.entities.pipeline.Pipeline
268
- """
269
- if project is not None:
270
- if project.id != _json.get('projectId', None):
271
- logger.warning('Pipeline has been fetched from a project that is not belong to it')
272
- project = None
273
-
274
- connections = [PipelineConnection.from_json(_json=con) for con in _json.get('connections', list())]
275
- json_variables = _json.get('variables', None) or list()
276
- variables = list()
277
- if json_variables:
278
- copy_json_variables = copy.deepcopy(json_variables)
279
- variables = [Variable.from_json(_json=v) for v in copy_json_variables]
280
-
281
- settings = PipelineSettings.from_json(_json=_json.get('settings', dict()))
282
- inst = cls(
283
- created_at=_json.get('createdAt', None),
284
- updated_at=_json.get('updatedAt', None),
285
- project_id=_json.get('projectId', None),
286
- org_id=_json.get('orgId', None),
287
- composition_id=_json.get('compositionId', None),
288
- creator=_json.get('creator', None),
289
- client_api=client_api,
290
- name=_json.get('name', None),
291
- project=project,
292
- id=_json.get('id', None),
293
- connections=connections,
294
- start_nodes=_json.get('startNodes', None),
295
- url=_json.get('url', None),
296
- preview=_json.get('preview', None),
297
- description=_json.get('description', None),
298
- revisions=_json.get('revisions', None),
299
- settings=settings,
300
- variables=variables,
301
- status=_json.get('status', None),
302
- original_settings=settings,
303
- original_variables=json_variables,
304
- updated_by=_json.get('updatedBy', None),
305
- )
306
- for node in _json.get('nodes', list()):
307
- inst.nodes.add(node=cls.pipeline_node(node))
308
- inst.is_fetched = is_fetched
309
- return inst
310
-
311
- @classmethod
312
- def pipeline_node(cls, _json):
313
- node_type = _json.get('type')
314
- if node_type == 'task':
315
- return TaskNode.from_json(_json)
316
- elif node_type == 'code':
317
- return CodeNode.from_json(_json)
318
- elif node_type == 'function':
319
- return FunctionNode.from_json(_json)
320
- elif node_type == 'storage':
321
- return DatasetNode.from_json(_json)
322
- else:
323
- return PipelineNode.from_json(_json)
324
-
325
- def settings_changed(self) -> bool:
326
- return self.settings.to_json() != self._original_settings.to_json()
327
-
328
- def variables_changed(self) -> bool:
329
- new_vars = [var.to_json() for var in self.variables]
330
- old_vars = self._original_variables or list()
331
- return new_vars != old_vars
332
-
333
- def to_json(self):
334
- """
335
- Turn Package entity into a platform representation of Package
336
-
337
- :return: platform json of package
338
- :rtype: dict
339
- """
340
- _json = attr.asdict(self,
341
- filter=attr.filters.exclude(attr.fields(Pipeline)._project,
342
- attr.fields(Pipeline)._repositories,
343
- attr.fields(Pipeline)._client_api,
344
- attr.fields(Pipeline).project_id,
345
- attr.fields(Pipeline).org_id,
346
- attr.fields(Pipeline).connections,
347
- attr.fields(Pipeline).created_at,
348
- attr.fields(Pipeline).updated_at,
349
- attr.fields(Pipeline).start_nodes,
350
- attr.fields(Pipeline).project_id,
351
- attr.fields(Pipeline).composition_id,
352
- attr.fields(Pipeline).url,
353
- attr.fields(Pipeline).preview,
354
- attr.fields(Pipeline).description,
355
- attr.fields(Pipeline).revisions,
356
- attr.fields(Pipeline).settings,
357
- attr.fields(Pipeline).variables,
358
- attr.fields(Pipeline)._original_settings,
359
- attr.fields(Pipeline)._original_variables,
360
- attr.fields(Pipeline).updated_by,
361
- ))
362
-
363
- _json['projectId'] = self.project_id
364
- _json['createdAt'] = self.created_at
365
- _json['updatedAt'] = self.updated_at
366
- _json['compositionId'] = self.composition_id
367
- _json['startNodes'] = self.start_nodes
368
- _json['orgId'] = self.org_id
369
- _json['nodes'] = [node.to_json() for node in self.nodes]
370
- _json['connections'] = [con.to_json() for con in self.connections]
371
- if self.variables:
372
- _json['variables'] = [v.to_json() for v in self.variables]
373
- _json['url'] = self.url
374
-
375
- settings_json = self.settings.to_json()
376
- if settings_json:
377
- _json['settings'] = settings_json
378
-
379
- if self.preview is not None:
380
- _json['preview'] = self.preview
381
- if self.description is not None:
382
- _json['description'] = self.description
383
- if self.revisions is not None:
384
- _json['revisions'] = self.revisions
385
- if self.updated_by is not None:
386
- _json['updatedBy'] = self.updated_by
387
-
388
- return _json
389
-
390
- #########
391
- # Props #
392
- #########
393
-
394
- @property
395
- def platform_url(self):
396
- return self._client_api._get_resource_url("projects/{}/pipelines/{}".format(self.project_id, self.id))
397
-
398
- @property
399
- def project(self):
400
- if self._project is None:
401
- self._project = self.projects.get(project_id=self.project_id, fetch=None)
402
- assert isinstance(self._project, entities.Project)
403
- return self._project
404
-
405
- ################
406
- # repositories #
407
- ################
408
- @_repositories.default
409
- def set_repositories(self):
410
- reps = namedtuple('repositories',
411
- field_names=['projects', 'pipelines', 'pipeline_executions', 'triggers', 'nodes'])
412
-
413
- r = reps(
414
- projects=repositories.Projects(client_api=self._client_api),
415
- pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
416
- pipeline_executions=repositories.PipelineExecutions(
417
- client_api=self._client_api, project=self._project, pipeline=self
418
- ),
419
- triggers=repositories.Triggers(client_api=self._client_api, pipeline=self),
420
- nodes=repositories.Nodes(client_api=self._client_api, pipeline=self)
421
- )
422
- return r
423
-
424
- @property
425
- def projects(self):
426
- assert isinstance(self._repositories.projects, repositories.Projects)
427
- return self._repositories.projects
428
-
429
- @property
430
- def triggers(self):
431
- assert isinstance(self._repositories.triggers, repositories.Triggers)
432
- return self._repositories.triggers
433
-
434
- @property
435
- def nodes(self):
436
- assert isinstance(self._repositories.nodes, repositories.Nodes)
437
- return self._repositories.nodes
438
-
439
- @property
440
- def pipelines(self):
441
- assert isinstance(self._repositories.pipelines, repositories.Pipelines)
442
- return self._repositories.pipelines
443
-
444
- @property
445
- def pipeline_executions(self):
446
- assert isinstance(self._repositories.pipeline_executions, repositories.PipelineExecutions)
447
- return self._repositories.pipeline_executions
448
-
449
- ###########
450
- # methods #
451
- ###########
452
- def update(self):
453
- """
454
- Update pipeline changes to platform
455
-
456
- :return: pipeline entity
457
- """
458
- return self.pipelines.update(pipeline=self)
459
-
460
- def delete(self):
461
- """
462
- Delete pipeline object
463
-
464
- :return: True
465
- """
466
- return self.pipelines.delete(pipeline=self)
467
-
468
- def open_in_web(self):
469
- """
470
- Open the pipeline in web platform
471
-
472
- :return:
473
- """
474
- self._client_api._open_in_web(url=self.platform_url)
475
-
476
- def install(self, resume_option: PipelineResumeOption = None):
477
- """
478
- install pipeline
479
-
480
- :return: Composition entity
481
- """
482
- return self.pipelines.install(pipeline=self, resume_option=resume_option)
483
-
484
- def pause(self, keep_triggers_active: bool = None):
485
- """
486
- pause pipeline
487
-
488
- :return: Composition entity
489
- """
490
- return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)
491
-
492
- def execute(self, execution_input=None, node_id: str = None):
493
- """
494
- execute a pipeline and return to execute
495
-
496
- :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
497
- :param str node_id: node id to execute
498
- :return: entities.PipelineExecution object
499
- """
500
- execution = self.pipeline_executions.create(
501
- pipeline_id=self.id,
502
- execution_input=execution_input,
503
- node_id=node_id
504
- )
505
- return execution
506
-
507
- def execute_batch(
508
- self,
509
- filters,
510
- execution_inputs=None,
511
- wait=True,
512
- node_id: str = None
513
- ):
514
- """
515
- execute a pipeline and return to execute
516
-
517
- :param execution_inputs: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}, that represent the extra inputs of the function
518
- :param filters: Filters entity for a filtering before execute
519
- :param bool wait: wait until create task finish
520
- :param str node_id: node id to execute
521
- :return: entities.PipelineExecution object
522
-
523
- **Example**:
524
-
525
- .. code-block:: python
526
-
527
- command = pipeline.execute_batch(
528
- execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
529
- filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]))
530
- """
531
- command = self.pipeline_executions.create_batch(
532
- pipeline_id=self.id,
533
- execution_inputs=execution_inputs,
534
- filters=filters,
535
- wait=wait,
536
- node_id=node_id
537
- )
538
- return command
539
-
540
- def reset(self, stop_if_running: bool = False):
541
- """
542
- Resets pipeline counters
543
-
544
- :param bool stop_if_running: If the pipeline is installed it will stop the pipeline and reset the counters.
545
- :return: bool
546
- """
547
- return self.pipelines.reset(pipeline_id=self.id, stop_if_running=stop_if_running)
548
-
549
- def stats(self):
550
- """
551
- Get pipeline counters
552
-
553
- :return: PipelineStats
554
- :rtype: dtlpy.entities.pipeline.PipelineStats
555
- """
556
- return self.pipelines.stats(pipeline_id=self.id)
557
-
558
- def set_start_node(self, node: PipelineNode):
559
- """
560
- Set the start node of the pipeline
561
-
562
- :param PipelineNode node: node to be the start node
563
- """
564
- connections = [connection for connection in self.connections if connection.target.node_id == node.node_id]
565
- if connections:
566
- raise Exception(
567
- 'Connections cannot be added to Pipeline start-node. To add a connection, please reposition the start sign')
568
- if self.start_nodes:
569
- for pipe_node in self.start_nodes:
570
- if pipe_node['type'] == 'root':
571
- pipe_node['nodeId'] = node.node_id
572
- else:
573
- self.start_nodes = [{"nodeId": node.node_id,
574
- "type": "root", }]
575
-
576
- def update_variables_values(self, **kwargs):
577
- """
578
- Update pipeline variables values for the given keyword arguments.
579
-
580
- **Example**:
581
-
582
- .. code-block:: python
583
- pipeline.update_variables_values(
584
- dataset=dataset.id,
585
- model=model.id,
586
- threshold=0.9
587
- )
588
- pipeline.update()
589
- """
590
- keys = kwargs.keys()
591
- for variable in self.variables:
592
- if variable.name in keys:
593
- variable.value = kwargs[variable.name]
1
+ from collections import namedtuple
2
+ import logging
3
+ import traceback
4
+ from enum import Enum
5
+ from typing import List
6
+ import attr
7
+ from .node import PipelineNode, PipelineConnection, TaskNode, CodeNode, FunctionNode, DatasetNode
8
+ from .. import repositories, entities
9
+ from ..services.api_client import ApiClient
10
+ from .package_function import PackageInputType
11
+ import copy
12
+
13
+ logger = logging.getLogger(name='dtlpy')
14
+
15
+
16
class PipelineResumeOption(str, Enum):
    """How to handle already-running cycles when a pipeline is (re)installed."""
    # NOTE: no trailing commas — a trailing comma would turn the member value
    # into a 1-tuple and only work through the str-mixin unpacking quirk.
    TERMINATE_EXISTING_CYCLES = 'terminateExistingCycles'
    RESUME_EXISTING_CYCLES = 'resumeExistingCycles'
19
+
20
+
21
class CompositionStatus(str, Enum):
    """Lifecycle states of a pipeline composition on the platform."""
    # NOTE: no trailing commas — a trailing comma would make each value a
    # 1-tuple and only behave through the str-mixin Enum unpacking quirk.
    CREATED = "Created"
    INITIALIZING = "Initializing"
    INSTALLED = "Installed"
    ACTIVATED = "Activated"
    DEACTIVATED = "Deactivated"
    UNINSTALLED = "Uninstalled"
    TERMINATING = "Terminating"
    TERMINATED = "Terminated"
    UPDATING = "Updating"
    FAILURE = "Failure"
32
+
33
+
34
class PipelineSettings:
    """
    Pipeline-level settings controlling install/pause behavior.

    :param default_resume_option: default handling of existing cycles on install
    :param bool keep_triggers_active: keep triggers active while the pipeline is paused
    :param bool active_trigger_ask_again: whether to prompt about active triggers again
    :param dict last_update: platform metadata of the last settings update
    """

    def __init__(
            self,
            default_resume_option: 'PipelineResumeOption' = None,
            keep_triggers_active: bool = None,
            active_trigger_ask_again: bool = None,
            last_update: dict = None
    ):
        self.default_resume_option = default_resume_option
        self.keep_triggers_active = keep_triggers_active
        self.active_trigger_ask_again = active_trigger_ask_again
        self.last_update = last_update

    @classmethod
    def from_json(cls, _json: dict = None):
        """
        Build a PipelineSettings entity from its platform json.

        :param dict _json: platform representation (may be None -> all-defaults)
        :return: PipelineSettings instance
        """
        if _json is None:
            _json = dict()
        return cls(
            default_resume_option=_json.get('defaultResumeOption', None),
            keep_triggers_active=_json.get('keepTriggersActive', None),
            active_trigger_ask_again=_json.get('activeTriggerAskAgain', None),
            last_update=_json.get('lastUpdate', None)
        )

    def to_json(self):
        """
        Serialize to platform json, omitting unset (None) fields.

        :return: platform representation of the settings
        :rtype: dict
        """
        _json = dict()

        # BUGFIX: each field is now gated and serialized by its own attribute.
        # Previously every branch tested and emitted default_resume_option,
        # so the other three settings were never written out.
        if self.default_resume_option is not None:
            _json['defaultResumeOption'] = self.default_resume_option

        if self.keep_triggers_active is not None:
            _json['keepTriggersActive'] = self.keep_triggers_active

        if self.active_trigger_ask_again is not None:
            _json['activeTriggerAskAgain'] = self.active_trigger_ask_again

        if self.last_update is not None:
            _json['lastUpdate'] = self.last_update

        return _json
75
+
76
+
77
class Variable(entities.DlEntity):
    """
    Pipeline Variables
    """
    # Each DlProperty is a descriptor proxying one key of the underlying
    # platform json dict (stored on ``self._dict``).
    id: str = entities.DlProperty(location=['id'], _type=str)
    created_at: str = entities.DlProperty(location=['createdAt'], _type=str)
    updated_at: str = entities.DlProperty(location=['updatedAt'], _type=str)
    reference: str = entities.DlProperty(location=['reference'], _type=str)
    creator: str = entities.DlProperty(location=['creator'], _type=str)
    variable_type: PackageInputType = entities.DlProperty(location=['type'], _type=PackageInputType)
    name: str = entities.DlProperty(location=['name'], _type=str)
    # Untyped on purpose: a variable's value may be any json-serializable type.
    value = entities.DlProperty(location=['value'])

    @classmethod
    def from_json(cls, _json):
        """
        Turn platform representation of variable into a pipeline variable entity

        :param dict _json: platform representation of pipeline variable
        :return: pipeline variable entity
        :rtype: dtlpy.entities.pipeline.Variable
        """

        inst = cls(_dict=_json)
        return inst

    def to_json(self):
        """
        Return the platform json of the variable.

        :return: variable of pipeline
        :rtype: dict
        """
        # Shallow copy so callers cannot mutate the entity's backing dict.
        _json = self._dict.copy()
        return _json
110
+
111
+
112
class PipelineAverages:
    """Execution-time averages reported by the pipeline statistics endpoint."""

    def __init__(
            self,
            avg_time_per_execution: float,
            avg_execution_per_day: float
    ):
        # Values may be the literal string 'NA' when the platform has no data.
        self.avg_time_per_execution = avg_time_per_execution
        self.avg_execution_per_day = avg_execution_per_day

    @classmethod
    def from_json(cls, _json: dict = None):
        """Build from platform json; missing fields default to 'NA'."""
        data = _json if _json is not None else dict()
        return cls(
            avg_time_per_execution=data.get('avgTimePerExecution', 'NA'),
            avg_execution_per_day=data.get('avgExecutionsPerDay', 'NA'),
        )
129
+
130
+
131
class NodeAverages:
    """Per-node execution averages keyed by node id."""

    def __init__(
            self,
            node_id: str,
            averages: PipelineAverages
    ):
        self.node_id = node_id
        self.averages = averages

    @classmethod
    def from_json(cls, _json: dict):
        """Build from platform json ('nodeId' + nested 'executionStatistics')."""
        stats = PipelineAverages.from_json(_json.get('executionStatistics'))
        return cls(node_id=_json.get('nodeId', None), averages=stats)
146
+
147
+
148
class PipelineCounter:
    """A single (status, count) bucket of pipeline execution statistics."""

    def __init__(
            self,
            status: str,
            count: int
    ):
        self.count = count
        self.status = status
156
+
157
+
158
class NodeCounters:
    """Per-node execution status counters keyed by node id."""

    def __init__(
            self,
            node_id: str,
            counters: List[PipelineCounter]
    ):
        self.node_id = node_id
        self.counters = counters

    @classmethod
    def from_json(cls, _json: dict):
        """Build from platform json ('nodeId' + a 'statusCount' list)."""
        raw_counts = _json.get('statusCount', list())
        return cls(
            node_id=_json.get('nodeId', None),
            counters=[PipelineCounter(**entry) for entry in raw_counts],
        )
173
+
174
+
175
class PipelineStats:
    """Aggregated execution statistics for a pipeline and each of its nodes."""

    def __init__(
            self,
            pipeline_counters: List[PipelineCounter],
            node_counters: List[NodeCounters],
            pipeline_averages: PipelineAverages,
            node_averages: List[NodeAverages]
    ):
        self.pipeline_counters = pipeline_counters
        self.node_counters = node_counters
        self.pipeline_averages = pipeline_averages
        self.node_averages = node_averages

    @classmethod
    def from_json(cls, _json: dict):
        """Build from the platform pipeline-statistics payload."""
        counters = [PipelineCounter(**entry) for entry in _json.get('pipelineExecutionCounters', list())]
        per_node_counts = [NodeCounters.from_json(_json=entry) for entry in _json.get('nodeExecutionsCounters', list())]
        averages = PipelineAverages.from_json(_json.get('pipelineExecutionStatistics', None))
        per_node_avgs = [NodeAverages.from_json(_json=entry) for entry in _json.get('nodeExecutionStatistics', list())]
        return cls(
            pipeline_counters=counters,
            node_counters=per_node_counts,
            pipeline_averages=averages,
            node_averages=per_node_avgs
        )
196
+
197
+
198
@attr.s
class Pipeline(entities.BaseEntity):
    """
    Pipeline object
    """
    # fields mirroring the platform json
    id = attr.ib()
    name = attr.ib()
    creator = attr.ib()
    org_id = attr.ib()
    connections = attr.ib()
    settings = attr.ib(type=PipelineSettings)
    variables = attr.ib(type=List[Variable])

    status = attr.ib(type=CompositionStatus)

    # fields whose python name differs from the platform json key
    # (e.g. created_at <-> createdAt); see from_json/to_json
    created_at = attr.ib()
    updated_at = attr.ib(repr=False)
    start_nodes = attr.ib()
    project_id = attr.ib()
    composition_id = attr.ib()
    url = attr.ib()
    preview = attr.ib()
    description = attr.ib()
    revisions = attr.ib()

    # sdk-internal state (excluded from to_json)
    _project = attr.ib(repr=False)
    _client_api = attr.ib(type=ApiClient, repr=False)
    # snapshots taken at fetch time, used by settings_changed()/variables_changed()
    _original_settings = attr.ib(repr=False, type=PipelineSettings)
    _original_variables = attr.ib(repr=False, type=List[Variable])
    _repositories = attr.ib(repr=False)

    updated_by = attr.ib(default=None)
233
+
234
+ @staticmethod
235
+ def _protected_from_json(_json, client_api, project=None, is_fetched=True):
236
+ """
237
+ Same as from_json but with try-except to catch if error
238
+ :param _json: platform json
239
+ :param client_api: ApiClient entity
240
+ :param dtlpy.entities.project.Project project: entity
241
+ :param is_fetched: is Entity fetched from Platform
242
+ :return:
243
+ """
244
+ try:
245
+ pipeline = Pipeline.from_json(
246
+ _json=_json,
247
+ client_api=client_api,
248
+ project=project,
249
+ is_fetched=is_fetched
250
+ )
251
+ status = True
252
+ except Exception:
253
+ pipeline = traceback.format_exc()
254
+ status = False
255
+ return status, pipeline
256
+
257
    @classmethod
    def from_json(cls, _json, client_api, project=None, is_fetched=True):
        """
        Turn platform representation of pipeline into a pipeline entity

        :param dict _json: platform representation of package
        :param dl.ApiClient client_api: ApiClient entity
        :param dtlpy.entities.project.Project project: entity
        :param bool is_fetched: is Entity fetched from Platform
        :return: Pipeline entity
        :rtype: dtlpy.entities.pipeline.Pipeline
        """
        # Drop the provided project if the payload belongs to a different one.
        if project is not None:
            if project.id != _json.get('projectId', None):
                logger.warning('Pipeline has been fetched from a project that is not belong to it')
                project = None

        connections = [PipelineConnection.from_json(_json=con) for con in _json.get('connections', list())]
        json_variables = _json.get('variables', None) or list()
        variables = list()
        if json_variables:
            # Deep-copy so the Variable entities do not share state with the raw
            # json list kept below as original_variables for change detection.
            copy_json_variables = copy.deepcopy(json_variables)
            variables = [Variable.from_json(_json=v) for v in copy_json_variables]

        settings = PipelineSettings.from_json(_json=_json.get('settings', dict()))
        # NOTE(review): `settings` and `original_settings` receive the SAME
        # PipelineSettings object, so settings_changed() compares an object to
        # itself (always False) unless self.settings is later reassigned —
        # confirm this is intended.
        inst = cls(
            created_at=_json.get('createdAt', None),
            updated_at=_json.get('updatedAt', None),
            project_id=_json.get('projectId', None),
            org_id=_json.get('orgId', None),
            composition_id=_json.get('compositionId', None),
            creator=_json.get('creator', None),
            client_api=client_api,
            name=_json.get('name', None),
            project=project,
            id=_json.get('id', None),
            connections=connections,
            start_nodes=_json.get('startNodes', None),
            url=_json.get('url', None),
            preview=_json.get('preview', None),
            description=_json.get('description', None),
            revisions=_json.get('revisions', None),
            settings=settings,
            variables=variables,
            status=_json.get('status', None),
            original_settings=settings,
            original_variables=json_variables,
            updated_by=_json.get('updatedBy', None),
        )
        # Nodes are added through the repository after construction so each one
        # is parsed into its concrete subclass (task/code/function/storage).
        for node in _json.get('nodes', list()):
            inst.nodes.add(node=cls.pipeline_node(node))
        inst.is_fetched = is_fetched
        return inst
310
+
311
+ @classmethod
312
+ def pipeline_node(cls, _json):
313
+ node_type = _json.get('type')
314
+ if node_type == 'task':
315
+ return TaskNode.from_json(_json)
316
+ elif node_type == 'code':
317
+ return CodeNode.from_json(_json)
318
+ elif node_type == 'function':
319
+ return FunctionNode.from_json(_json)
320
+ elif node_type == 'storage':
321
+ return DatasetNode.from_json(_json)
322
+ else:
323
+ return PipelineNode.from_json(_json)
324
+
325
+ def settings_changed(self) -> bool:
326
+ return self.settings.to_json() != self._original_settings.to_json()
327
+
328
+ def variables_changed(self) -> bool:
329
+ new_vars = [var.to_json() for var in self.variables]
330
+ old_vars = self._original_variables or list()
331
+ return new_vars != old_vars
332
+
333
    def to_json(self):
        """
        Turn Package entity into a platform representation of Package

        :return: platform json of package
        :rtype: dict
        """
        # Start from the attrs auto-dump, excluding sdk-internal fields and
        # every field whose json key differs from its python name; those are
        # re-added below under their camelCase keys.
        # NOTE(review): project_id appears twice in the exclude list — harmless,
        # but the duplicate could be removed.
        _json = attr.asdict(self,
                            filter=attr.filters.exclude(attr.fields(Pipeline)._project,
                                                        attr.fields(Pipeline)._repositories,
                                                        attr.fields(Pipeline)._client_api,
                                                        attr.fields(Pipeline).project_id,
                                                        attr.fields(Pipeline).org_id,
                                                        attr.fields(Pipeline).connections,
                                                        attr.fields(Pipeline).created_at,
                                                        attr.fields(Pipeline).updated_at,
                                                        attr.fields(Pipeline).start_nodes,
                                                        attr.fields(Pipeline).project_id,
                                                        attr.fields(Pipeline).composition_id,
                                                        attr.fields(Pipeline).url,
                                                        attr.fields(Pipeline).preview,
                                                        attr.fields(Pipeline).description,
                                                        attr.fields(Pipeline).revisions,
                                                        attr.fields(Pipeline).settings,
                                                        attr.fields(Pipeline).variables,
                                                        attr.fields(Pipeline)._original_settings,
                                                        attr.fields(Pipeline)._original_variables,
                                                        attr.fields(Pipeline).updated_by,
                                                        ))

        # Re-add renamed fields under their platform (camelCase) keys.
        _json['projectId'] = self.project_id
        _json['createdAt'] = self.created_at
        _json['updatedAt'] = self.updated_at
        _json['compositionId'] = self.composition_id
        _json['startNodes'] = self.start_nodes
        _json['orgId'] = self.org_id
        _json['nodes'] = [node.to_json() for node in self.nodes]
        _json['connections'] = [con.to_json() for con in self.connections]
        if self.variables:
            _json['variables'] = [v.to_json() for v in self.variables]
        _json['url'] = self.url

        # Settings are only emitted when at least one field is set.
        settings_json = self.settings.to_json()
        if settings_json:
            _json['settings'] = settings_json

        # Optional fields: omitted entirely when unset.
        if self.preview is not None:
            _json['preview'] = self.preview
        if self.description is not None:
            _json['description'] = self.description
        if self.revisions is not None:
            _json['revisions'] = self.revisions
        if self.updated_by is not None:
            _json['updatedBy'] = self.updated_by

        return _json
389
+
390
+ #########
391
+ # Props #
392
+ #########
393
+
394
+ @property
395
+ def platform_url(self):
396
+ return self._client_api._get_resource_url("projects/{}/pipelines/{}".format(self.project_id, self.id))
397
+
398
+ @property
399
+ def project(self):
400
+ if self._project is None:
401
+ self._project = self.projects.get(project_id=self.project_id, fetch=None)
402
+ assert isinstance(self._project, entities.Project)
403
+ return self._project
404
+
405
+ ################
406
+ # repositories #
407
+ ################
408
    @_repositories.default
    def set_repositories(self):
        # attrs default factory: builds the bundle of repository accessors,
        # each scoped to this pipeline (and its project when available).
        reps = namedtuple('repositories',
                          field_names=['projects', 'pipelines', 'pipeline_executions', 'triggers', 'nodes'])

        r = reps(
            projects=repositories.Projects(client_api=self._client_api),
            pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
            pipeline_executions=repositories.PipelineExecutions(
                client_api=self._client_api, project=self._project, pipeline=self
            ),
            triggers=repositories.Triggers(client_api=self._client_api, pipeline=self),
            nodes=repositories.Nodes(client_api=self._client_api, pipeline=self)
        )
        return r
423
+
424
+ @property
425
+ def projects(self):
426
+ assert isinstance(self._repositories.projects, repositories.Projects)
427
+ return self._repositories.projects
428
+
429
+ @property
430
+ def triggers(self):
431
+ assert isinstance(self._repositories.triggers, repositories.Triggers)
432
+ return self._repositories.triggers
433
+
434
+ @property
435
+ def nodes(self):
436
+ assert isinstance(self._repositories.nodes, repositories.Nodes)
437
+ return self._repositories.nodes
438
+
439
+ @property
440
+ def pipelines(self):
441
+ assert isinstance(self._repositories.pipelines, repositories.Pipelines)
442
+ return self._repositories.pipelines
443
+
444
+ @property
445
+ def pipeline_executions(self):
446
+ assert isinstance(self._repositories.pipeline_executions, repositories.PipelineExecutions)
447
+ return self._repositories.pipeline_executions
448
+
449
+ ###########
450
+ # methods #
451
+ ###########
452
+ def update(self):
453
+ """
454
+ Update pipeline changes to platform
455
+
456
+ :return: pipeline entity
457
+ """
458
+ return self.pipelines.update(pipeline=self)
459
+
460
+ def delete(self):
461
+ """
462
+ Delete pipeline object
463
+
464
+ :return: True
465
+ """
466
+ return self.pipelines.delete(pipeline=self)
467
+
468
+ def open_in_web(self):
469
+ """
470
+ Open the pipeline in web platform
471
+
472
+ :return:
473
+ """
474
+ self._client_api._open_in_web(url=self.platform_url)
475
+
476
+ def install(self, resume_option: PipelineResumeOption = None):
477
+ """
478
+ install pipeline
479
+
480
+ :return: Composition entity
481
+ """
482
+ return self.pipelines.install(pipeline=self, resume_option=resume_option)
483
+
484
+ def pause(self, keep_triggers_active: bool = None):
485
+ """
486
+ pause pipeline
487
+
488
+ :return: Composition entity
489
+ """
490
+ return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)
491
+
492
+ def execute(self, execution_input=None, node_id: str = None):
493
+ """
494
+ execute a pipeline and return to execute
495
+
496
+ :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
497
+ :param str node_id: node id to execute
498
+ :return: entities.PipelineExecution object
499
+ """
500
+ execution = self.pipeline_executions.create(
501
+ pipeline_id=self.id,
502
+ execution_input=execution_input,
503
+ node_id=node_id
504
+ )
505
+ return execution
506
+
507
+ def execute_batch(
508
+ self,
509
+ filters,
510
+ execution_inputs=None,
511
+ wait=True,
512
+ node_id: str = None
513
+ ):
514
+ """
515
+ execute a pipeline and return to execute
516
+
517
+ :param execution_inputs: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}, that represent the extra inputs of the function
518
+ :param filters: Filters entity for a filtering before execute
519
+ :param bool wait: wait until create task finish
520
+ :param str node_id: node id to execute
521
+ :return: entities.PipelineExecution object
522
+
523
+ **Example**:
524
+
525
+ .. code-block:: python
526
+
527
+ command = pipeline.execute_batch(
528
+ execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
529
+ filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]))
530
+ """
531
+ command = self.pipeline_executions.create_batch(
532
+ pipeline_id=self.id,
533
+ execution_inputs=execution_inputs,
534
+ filters=filters,
535
+ wait=wait,
536
+ node_id=node_id
537
+ )
538
+ return command
539
+
540
+ def reset(self, stop_if_running: bool = False):
541
+ """
542
+ Resets pipeline counters
543
+
544
+ :param bool stop_if_running: If the pipeline is installed it will stop the pipeline and reset the counters.
545
+ :return: bool
546
+ """
547
+ return self.pipelines.reset(pipeline_id=self.id, stop_if_running=stop_if_running)
548
+
549
+ def stats(self):
550
+ """
551
+ Get pipeline counters
552
+
553
+ :return: PipelineStats
554
+ :rtype: dtlpy.entities.pipeline.PipelineStats
555
+ """
556
+ return self.pipelines.stats(pipeline_id=self.id)
557
+
558
+ def set_start_node(self, node: PipelineNode):
559
+ """
560
+ Set the start node of the pipeline
561
+
562
+ :param PipelineNode node: node to be the start node
563
+ """
564
+ connections = [connection for connection in self.connections if connection.target.node_id == node.node_id]
565
+ if connections:
566
+ raise Exception(
567
+ 'Connections cannot be added to Pipeline start-node. To add a connection, please reposition the start sign')
568
+ if self.start_nodes:
569
+ for pipe_node in self.start_nodes:
570
+ if pipe_node['type'] == 'root':
571
+ pipe_node['nodeId'] = node.node_id
572
+ else:
573
+ self.start_nodes = [{"nodeId": node.node_id,
574
+ "type": "root", }]
575
+
576
+ def update_variables_values(self, **kwargs):
577
+ """
578
+ Update pipeline variables values for the given keyword arguments.
579
+
580
+ **Example**:
581
+
582
+ .. code-block:: python
583
+ pipeline.update_variables_values(
584
+ dataset=dataset.id,
585
+ model=model.id,
586
+ threshold=0.9
587
+ )
588
+ pipeline.update()
589
+ """
590
+ keys = kwargs.keys()
591
+ for variable in self.variables:
592
+ if variable.name in keys:
593
+ variable.value = kwargs[variable.name]