runnable-0.35.0-py3-none-any.whl → runnable-0.36.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. extensions/job_executor/__init__.py +3 -4
  2. extensions/job_executor/emulate.py +106 -0
  3. extensions/job_executor/k8s.py +8 -8
  4. extensions/job_executor/local_container.py +13 -14
  5. extensions/nodes/__init__.py +0 -0
  6. extensions/nodes/conditional.py +7 -5
  7. extensions/nodes/fail.py +72 -0
  8. extensions/nodes/map.py +350 -0
  9. extensions/nodes/parallel.py +159 -0
  10. extensions/nodes/stub.py +89 -0
  11. extensions/nodes/success.py +72 -0
  12. extensions/nodes/task.py +92 -0
  13. extensions/pipeline_executor/__init__.py +24 -26
  14. extensions/pipeline_executor/argo.py +18 -15
  15. extensions/pipeline_executor/emulate.py +112 -0
  16. extensions/pipeline_executor/local.py +4 -4
  17. extensions/pipeline_executor/local_container.py +19 -79
  18. extensions/pipeline_executor/mocked.py +4 -4
  19. extensions/pipeline_executor/retry.py +6 -10
  20. extensions/tasks/torch.py +1 -1
  21. runnable/__init__.py +0 -8
  22. runnable/catalog.py +1 -21
  23. runnable/cli.py +0 -59
  24. runnable/context.py +519 -28
  25. runnable/datastore.py +51 -54
  26. runnable/defaults.py +12 -34
  27. runnable/entrypoints.py +82 -440
  28. runnable/exceptions.py +35 -34
  29. runnable/executor.py +13 -20
  30. runnable/names.py +1 -1
  31. runnable/nodes.py +16 -15
  32. runnable/parameters.py +2 -2
  33. runnable/sdk.py +66 -163
  34. runnable/tasks.py +62 -21
  35. runnable/utils.py +6 -268
  36. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/METADATA +1 -1
  37. runnable-0.36.0.dist-info/RECORD +74 -0
  38. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/entry_points.txt +8 -7
  39. extensions/nodes/nodes.py +0 -778
  40. runnable-0.35.0.dist-info/RECORD +0 -66
  41. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/WHEEL +0 -0
  42. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/licenses/LICENSE +0 -0
extensions/nodes/map.py
@@ -0,0 +1,350 @@
+ import importlib
+ import logging
+ import os
+ import sys
+ from collections import OrderedDict
+ from copy import deepcopy
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union, cast
+
+ from pydantic import Field
+
+ from extensions.nodes.task import TaskNode
+ from runnable import console, defaults, utils
+ from runnable.datastore import (
+     JsonParameter,
+     MetricParameter,
+     ObjectParameter,
+     Parameter,
+ )
+ from runnable.defaults import MapVariableType
+ from runnable.graph import Graph, create_graph
+ from runnable.nodes import CompositeNode
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ class MapNode(CompositeNode):
+     """
+     A composite node that contains ONE graph object within itself that has to be executed with an iterable.
+
+     The structure is generally:
+         MapNode:
+             branch
+
+     The config is expected to have the variables 'iterate_on' and 'iterate_as', which are looked up in the parameters:
+     for iter_variable in parameters['iterate_on']:
+         execute the branch by sending {'iterate_as': iter_variable}
+
+     The internal naming convention creates branches dynamically based on the iteration value.
+     """
+
+     # TODO: Should it be one function or a dict of functions indexed by the return name
+
+     node_type: str = Field(default="map", serialization_alias="type")
+     iterate_on: str
+     iterate_as: str
+     reducer: Optional[str] = Field(default=None)
+     branch: Graph
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {
+             "name": self.name,
+             "type": self.node_type,
+             "branch": self.branch.get_summary(),
+             "iterate_on": self.iterate_on,
+             "iterate_as": self.iterate_as,
+             "reducer": self.reducer,
+         }
+
+         return summary
+
+     def get_reducer_function(self):
+         if not self.reducer:
+             return lambda *x: list(x)  # returns a list of the args
+
+         # try a lambda function
+         try:
+             f = eval(self.reducer)
+             if callable(f):
+                 return f
+         except SyntaxError:
+             logger.info(f"{self.reducer} is not a lambda function")
+
+         # Load the reducer function from a dotted path
+         mod, func = utils.get_module_and_attr_names(self.reducer)
+         sys.path.insert(0, os.getcwd())  # Need to add the current directory to path
+         imported_module = importlib.import_module(mod)
+         f = getattr(imported_module, func)
+
+         return f
+
+     @classmethod
+     def parse_from_config(cls, config: Dict[str, Any]) -> "MapNode":
+         internal_name = cast(str, config.get("internal_name"))
+
+         config_branch = config.pop("branch", {})
+         if not config_branch:
+             raise Exception("A map node should have a branch")
+
+         branch = create_graph(
+             deepcopy(config_branch),
+             internal_branch_name=internal_name + "." + defaults.MAP_PLACEHOLDER,
+         )
+         return cls(branch=branch, **config)
+
+     @property
+     def branch_returns(self):
+         branch_returns: List[
+             Tuple[str, Union[ObjectParameter, MetricParameter, JsonParameter]]
+         ] = []
+         for _, node in self.branch.nodes.items():
+             if isinstance(node, TaskNode):
+                 for task_return in node.executable.returns:
+                     if task_return.kind == "json":
+                         branch_returns.append(
+                             (
+                                 task_return.name,
+                                 JsonParameter(kind="json", value="", reduced=False),
+                             )
+                         )
+                     elif task_return.kind == "object":
+                         branch_returns.append(
+                             (
+                                 task_return.name,
+                                 ObjectParameter(
+                                     kind="object",
+                                     value="Will be reduced",
+                                     reduced=False,
+                                 ),
+                             )
+                         )
+                     elif task_return.kind == "metric":
+                         branch_returns.append(
+                             (
+                                 task_return.name,
+                                 MetricParameter(kind="metric", value="", reduced=False),
+                             )
+                         )
+                     else:
+                         raise Exception("kind should be either json, object or metric")
+
+         return branch_returns
+
+     def _get_branch_by_name(self, branch_name: str) -> Graph:
+         """
+         Retrieve a branch by name.
+
+         In the case of a Map Object, the branch naming is dynamic as it is parameterized on the iterable.
+         This method takes no responsibility in checking the validity of the naming.
+
+         Returns a Graph Object
+
+         Args:
+             branch_name (str): The name of the branch to retrieve
+
+         Raises:
+             Exception: If the branch by that name does not exist
+         """
+         return self.branch
+
+     def fan_out(self, map_variable: MapVariableType = None):
+         """
+         The general method to fan out for a node of type map.
+         This method assumes that the step log has already been created.
+
+         3rd party orchestrators should call this method to create the individual branch logs.
+
+         Args:
+             executor (BaseExecutor): The executor class as defined by the config
+             map_variable (dict, optional): If the node is part of a map. Defaults to None.
+         """
+         iterate_on = self._context.run_log_store.get_parameters(self._context.run_id)[
+             self.iterate_on
+         ].get_value()
+
+         # Prepare the branch logs
+         for iter_variable in iterate_on:
+             effective_branch_name = self._resolve_map_placeholders(
+                 self.internal_name + "." + str(iter_variable), map_variable=map_variable
+             )
+             branch_log = self._context.run_log_store.create_branch_log(
+                 effective_branch_name
+             )
+
+             console.print(
+                 f"Branch log created for {effective_branch_name}: {branch_log}"
+             )
+             branch_log.status = defaults.PROCESSING
+             self._context.run_log_store.add_branch_log(branch_log, self._context.run_id)
+
+         # Gather all the returns of the task nodes and create parameters in reduced=False state.
+         # TODO: Why are we preemptively creating the parameters?
+         raw_parameters = {}
+         if map_variable:
+             # If we are in a map state already, the param should have an index of the map variable.
+             for _, v in map_variable.items():
+                 for branch_return in self.branch_returns:
+                     param_name, param_type = branch_return
+                     raw_parameters[f"{v}_{param_name}"] = param_type.copy()
+         else:
+             for branch_return in self.branch_returns:
+                 param_name, param_type = branch_return
+                 raw_parameters[f"{param_name}"] = param_type.copy()
+
+         self._context.run_log_store.set_parameters(
+             parameters=raw_parameters, run_id=self._context.run_id
+         )
+
+     def execute_as_graph(self, map_variable: MapVariableType = None):
+         """
+         This function does the actual execution of the branch of the map node.
+
+         From a design perspective, this function should not be called if the execution is 3rd party orchestrated.
+
+         The modes that render the job specifications do not need to interact with this node at all, as
+         they have their own internal mechanisms of handling map states or dynamic parallel states.
+         If they do not, you can find a way using as-is nodes as hack nodes.
+
+         The actual logic is:
+         * We iterate over the iterable as mentioned in the config
+         * For every value in the iterable we call the executor.execute_graph(branch, iterate_as: iter_variable)
+
+         The execution of a dag could result in:
+         * The dag being completely executed with a definite (fail, success) state in case of local
+           or local-container execution
+         * The dag being in a processing state with PROCESSING status in case of local-aws-batch
+
+         Only the fail state is considered a failure during this phase of execution.
+
+         Args:
+             executor (Executor): The Executor as per the use config
+             map_variable (dict): The map variables the graph belongs to
+             **kwargs: Optional kwargs passed around
+         """
+
+         iterate_on = None
+         try:
+             iterate_on = self._context.run_log_store.get_parameters(
+                 self._context.run_id
+             )[self.iterate_on].get_value()
+         except KeyError as e:
+             raise Exception(
+                 (
+                     f"Expected parameter {self.iterate_on}",
+                     "not present in Run Log parameters",
+                     "was it ever set before?",
+                 )
+             ) from e
+
+         if not isinstance(iterate_on, list):
+             raise Exception("Only list is allowed as a valid iterator type")
+
+         self.fan_out(map_variable=map_variable)
+
+         for iter_variable in iterate_on:
+             effective_map_variable = map_variable or OrderedDict()
+             effective_map_variable[self.iterate_as] = iter_variable
+
+             self._context.pipeline_executor.execute_graph(
+                 self.branch, map_variable=effective_map_variable
+             )
+
+         self.fan_in(map_variable=map_variable)
+
+     def fan_in(self, map_variable: MapVariableType = None):
+         """
+         The general method to fan in for a node of type map.
+
+         3rd party orchestrators should call this method to find the status of the step log.
+
+         Args:
+             executor (BaseExecutor): The executor class as defined by the config
+             map_variable (dict, optional): If the node is part of a map node. Defaults to None.
+         """
+         params = self._context.run_log_store.get_parameters(self._context.run_id)
+         iterate_on = params[self.iterate_on].get_value()
+         # Find the status of the branches
+         step_success_bool = True
+         effective_internal_name = self._resolve_map_placeholders(
+             self.internal_name, map_variable=map_variable
+         )
+
+         for iter_variable in iterate_on:
+             effective_branch_name = self._resolve_map_placeholders(
+                 self.internal_name + "." + str(iter_variable), map_variable=map_variable
+             )
+             branch_log = self._context.run_log_store.get_branch_log(
+                 effective_branch_name, self._context.run_id
+             )
+             # console.print(f"Branch log for {effective_branch_name}: {branch_log}")
+
+             if branch_log.status != defaults.SUCCESS:
+                 step_success_bool = False
+
+         # Collate all the results and update the status of the step
+         step_log = self._context.run_log_store.get_step_log(
+             effective_internal_name, self._context.run_id
+         )
+
+         if step_success_bool:  # If none failed and nothing is waiting
+             step_log.status = defaults.SUCCESS
+         else:
+             step_log.status = defaults.FAIL
+
+         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
+
+         # If we failed, we return without any collection
+         if not step_log.status == defaults.SUCCESS:
+             return
+
+         # Apply the reduce function and reduce the returns of the task nodes.
+         # The final value of the parameter is the result of the reduce function.
+         reducer_f = self.get_reducer_function()
+
+         def update_param(
+             params: Dict[str, Parameter], reducer_f: Callable, map_prefix: str = ""
+         ):
+             for branch_return in self.branch_returns:
+                 param_name, _ = branch_return
+
+                 to_reduce = []
+                 for iter_variable in iterate_on:
+                     try:
+                         to_reduce.append(
+                             params[f"{iter_variable}_{param_name}"].get_value()
+                         )
+                     except KeyError as e:
+                         from extensions.pipeline_executor.mocked import MockedExecutor
+
+                         if isinstance(self._context.pipeline_executor, MockedExecutor):
+                             pass
+                         else:
+                             raise Exception(
+                                 (
+                                     f"Expected parameter {iter_variable}_{param_name}",
+                                     "not present in Run Log parameters",
+                                     "was it ever set before?",
+                                 )
+                             ) from e
+
+                 param_name = f"{map_prefix}{param_name}"
+                 if to_reduce:
+                     params[param_name].value = reducer_f(*to_reduce)
+                 else:
+                     params[param_name].value = ""
+                 params[param_name].reduced = True
+
+         if map_variable:
+             # If we are in a map state already, the param should have an index of the map variable.
+             for _, v in map_variable.items():
+                 update_param(params, reducer_f, map_prefix=f"{v}_")
+         else:
+             update_param(params, reducer_f)
+
+         self._context.run_log_store.set_parameters(
+             parameters=params, run_id=self._context.run_id
+         )
+
+         self._context.run_log_store.set_parameters(
+             parameters=params, run_id=self._context.run_id
+         )
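A note on the `reducer` field added above: `MapNode.get_reducer_function` accepts either a lambda expressed as a string or a dotted import path. A minimal sketch of how such a string resolves, with hypothetical parameter values:

# A sketch of the two reducer forms MapNode.get_reducer_function accepts.
# The numeric values here are hypothetical branch returns.

# Default (reducer=None): collect every branch's return into a list.
default_reducer = lambda *x: list(x)
assert default_reducer(0.91, 0.87, 0.93) == [0.91, 0.87, 0.93]

# A lambda given as a string in the pipeline definition is eval'ed first:
best = eval("lambda *x: max(x)")
assert best(0.91, 0.87, 0.93) == 0.93

# A dotted path such as "mymodule.reduce_scores" (hypothetical) would instead
# be imported via utils.get_module_and_attr_names / importlib, as in the code above.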
extensions/nodes/parallel.py
@@ -0,0 +1,159 @@
+ from copy import deepcopy
+ from typing import Any, Dict, cast
+
+ from pydantic import Field, field_serializer
+
+ from runnable import defaults
+ from runnable.defaults import MapVariableType
+ from runnable.graph import Graph, create_graph
+ from runnable.nodes import CompositeNode
+
+
+ class ParallelNode(CompositeNode):
+     """
+     A composite node containing many graph objects within itself.
+
+     The structure is generally:
+         ParallelNode:
+             Branch A:
+                 Sub graph definition
+             Branch B:
+                 Sub graph definition
+             . . .
+
+     """
+
+     node_type: str = Field(default="parallel", serialization_alias="type")
+     branches: Dict[str, Graph]
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {
+             "name": self.name,
+             "type": self.node_type,
+             "branches": [branch.get_summary() for branch in self.branches.values()],
+         }
+
+         return summary
+
+     @field_serializer("branches")
+     def ser_branches(self, branches: Dict[str, Graph]) -> Dict[str, Graph]:
+         ret: Dict[str, Graph] = {}
+
+         for branch_name, branch in branches.items():
+             ret[branch_name.split(".")[-1]] = branch
+
+         return ret
+
+     @classmethod
+     def parse_from_config(cls, config: Dict[str, Any]) -> "ParallelNode":
+         internal_name = cast(str, config.get("internal_name"))
+
+         config_branches = config.pop("branches", {})
+         branches = {}
+         for branch_name, branch_config in config_branches.items():
+             sub_graph = create_graph(
+                 deepcopy(branch_config),
+                 internal_branch_name=internal_name + "." + branch_name,
+             )
+             branches[internal_name + "." + branch_name] = sub_graph
+
+         if not branches:
+             raise Exception("A parallel node should have branches")
+         return cls(branches=branches, **config)
+
+     def _get_branch_by_name(self, branch_name: str) -> Graph:
+         if branch_name in self.branches:
+             return self.branches[branch_name]
+
+         raise Exception(f"Branch {branch_name} does not exist")
+
+     def fan_out(self, map_variable: MapVariableType = None):
+         """
+         The general fan-out method for a node of type Parallel.
+         This method assumes that the step log has already been created.
+
+         3rd party orchestrators should create the step log and use this method to create the branch logs.
+
+         Args:
+             executor (BaseExecutor): The executor class as defined by the config
+             map_variable (dict, optional): If the node is part of a map node. Defaults to None.
+         """
+         # Prepare the branch logs
+         for internal_branch_name, _ in self.branches.items():
+             effective_branch_name = self._resolve_map_placeholders(
+                 internal_branch_name, map_variable=map_variable
+             )
+
+             branch_log = self._context.run_log_store.create_branch_log(
+                 effective_branch_name
+             )
+             branch_log.status = defaults.PROCESSING
+             self._context.run_log_store.add_branch_log(branch_log, self._context.run_id)
+
+     def execute_as_graph(self, map_variable: MapVariableType = None):
+         """
+         This function does the actual execution of the sub-branches of the parallel node.
+
+         From a design perspective, this function should not be called if the execution is 3rd party orchestrated.
+
+         The modes that render the job specifications do not need to interact with this node at all, as they have their
+         own internal mechanisms of handling parallel states.
+         If they do not, you can find a way using as-is nodes as hack nodes.
+
+         The execution of a dag could result in:
+         * The dag being completely executed with a definite (fail, success) state in case of
+           local or local-container execution
+         * The dag being in a processing state with PROCESSING status in case of local-aws-batch
+
+         Only the fail state is considered a failure during this phase of execution.
+
+         Args:
+             executor (Executor): The Executor as per the use config
+             **kwargs: Optional kwargs passed around
+         """
+         self.fan_out(map_variable=map_variable)
+
+         for _, branch in self.branches.items():
+             self._context.pipeline_executor.execute_graph(
+                 branch, map_variable=map_variable
+             )
+
+         self.fan_in(map_variable=map_variable)
+
+     def fan_in(self, map_variable: MapVariableType = None):
+         """
+         The general fan-in method for a node of type Parallel.
+
+         3rd party orchestrators should use this method to find the status of the composite step.
+
+         Args:
+             executor (BaseExecutor): The executor class as defined by the config
+             map_variable (dict, optional): If the node is part of a map. Defaults to None.
+         """
+         effective_internal_name = self._resolve_map_placeholders(
+             self.internal_name, map_variable=map_variable
+         )
+         step_success_bool = True
+         for internal_branch_name, _ in self.branches.items():
+             effective_branch_name = self._resolve_map_placeholders(
+                 internal_branch_name, map_variable=map_variable
+             )
+             branch_log = self._context.run_log_store.get_branch_log(
+                 effective_branch_name, self._context.run_id
+             )
+
+             if branch_log.status != defaults.SUCCESS:
+                 step_success_bool = False
+
+         # Collate all the results and update the status of the step
+
+         step_log = self._context.run_log_store.get_step_log(
+             effective_internal_name, self._context.run_id
+         )
+
+         if step_success_bool:  # If none failed
+             step_log.status = defaults.SUCCESS
+         else:
+             step_log.status = defaults.FAIL
+
+         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
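Both MapNode and ParallelNode document the same fan-out / fan-in contract for 3rd party orchestrators. A minimal sketch of that lifecycle, where `schedule_branches` is a hypothetical stand-in for the orchestrator's own scheduling step (not a runnable API):

from typing import Any, Optional

def schedule_branches(node: Any) -> None:
    # Orchestrator-specific: Argo, AWS Batch, etc. run each branch out of process.
    raise NotImplementedError

def orchestrate_composite(node: Any, map_variable: Optional[dict] = None) -> None:
    node.fan_out(map_variable=map_variable)  # create branch logs, mark them PROCESSING
    schedule_branches(node)                  # execute branches (orchestrator-specific)
    node.fan_in(map_variable=map_variable)   # collate branch statuses into the step log

For local execution, `execute_as_graph` above plays all three roles itself.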
extensions/nodes/stub.py
@@ -0,0 +1,89 @@
+ import logging
+ from datetime import datetime
+ from typing import Any, Dict
+
+ from pydantic import ConfigDict, Field
+
+ from runnable import datastore, defaults
+ from runnable.datastore import StepLog
+ from runnable.defaults import MapVariableType
+ from runnable.nodes import ExecutableNode
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ class StubNode(ExecutableNode):
+     """
+     Stub is a convenience design node.
+     It always returns success in the attempt log and does nothing.
+
+     This node is very similar to the Pass state in Step Functions.
+
+     This node type could be handy when designing the pipeline and stubbing functions.
+     --8<-- [start:stub_reference]
+     A stub execution node of the pipeline.
+     Please refer to define pipeline/tasks/stub for more information.
+
+     As part of the dag definition, a stub task is defined as follows:
+
+     dag:
+       steps:
+         stub_task: # The name of the node
+           type: stub
+           on_failure: The name of the step to traverse in case of failure
+           next: The next node to execute after this task. Use "success" to terminate the pipeline
+             successfully or "fail" to terminate the pipeline with an error.
+
+     It can take an arbitrary number of parameters, which is handy to temporarily silence a task node.
+     --8<-- [end:stub_reference]
+     """
+
+     node_type: str = Field(default="stub", serialization_alias="type")
+     model_config = ConfigDict(extra="ignore")
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {
+             "name": self.name,
+             "type": self.node_type,
+         }
+
+         return summary
+
+     @classmethod
+     def parse_from_config(cls, config: Dict[str, Any]) -> "StubNode":
+         return cls(**config)
+
+     def execute(
+         self,
+         mock=False,
+         map_variable: MapVariableType = None,
+         attempt_number: int = 1,
+     ) -> StepLog:
+         """
+         Do-nothing node.
+         We just send a success attempt log back to the caller.
+
+         Args:
+             executor ([type]): [description]
+             mock (bool, optional): [description]. Defaults to False.
+             map_variable (str, optional): [description]. Defaults to ''.
+
+         Returns:
+             [type]: [description]
+         """
+         step_log = self._context.run_log_store.get_step_log(
+             self._get_step_log_name(map_variable), self._context.run_id
+         )
+
+         attempt_log = datastore.StepAttempt(
+             status=defaults.SUCCESS,
+             start_time=str(datetime.now()),
+             end_time=str(datetime.now()),
+             attempt_number=attempt_number,
+         )
+
+         step_log.status = attempt_log.status
+
+         step_log.attempts.append(attempt_log)
+
+         return step_log
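The `model_config = ConfigDict(extra="ignore")` line is what lets a stub absorb arbitrary fields from a silenced task definition. A self-contained sketch of that pydantic v2 behaviour, using a toy model rather than the real node (the base node classes presumably reject unknown fields, which is why StubNode overrides this):

from pydantic import BaseModel, ConfigDict

class Toy(BaseModel):
    model_config = ConfigDict(extra="ignore")  # same setting as StubNode above
    name: str

# Unknown keys such as `command` are silently dropped rather than raising
# a validation error, so a task definition can be stubbed without pruning it:
t = Toy(name="stub_task", command="python train.py")
assert t.model_dump() == {"name": "stub_task"}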
extensions/nodes/success.py
@@ -0,0 +1,72 @@
+ from datetime import datetime
+ from typing import Any, Dict, cast
+
+ from pydantic import Field
+
+ from runnable import datastore, defaults
+ from runnable.datastore import StepLog
+ from runnable.defaults import MapVariableType
+ from runnable.nodes import TerminalNode
+
+
+ class SuccessNode(TerminalNode):
+     """
+     A leaf node of the graph that represents a success node.
+     """
+
+     node_type: str = Field(default="success", serialization_alias="type")
+
+     @classmethod
+     def parse_from_config(cls, config: Dict[str, Any]) -> "SuccessNode":
+         return cast("SuccessNode", super().parse_from_config(config))
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {
+             "name": self.name,
+             "type": self.node_type,
+         }
+
+         return summary
+
+     def execute(
+         self,
+         mock=False,
+         map_variable: MapVariableType = None,
+         attempt_number: int = 1,
+     ) -> StepLog:
+         """
+         Execute the success node.
+         Set the run or branch log status to success.
+
+         Args:
+             executor (_type_): The executor class
+             mock (bool, optional): If we should just mock and not perform anything. Defaults to False.
+             map_variable (dict, optional): If the node belongs to an internal branch. Defaults to None.
+
+         Returns:
+             StepAttempt: The step attempt object
+         """
+         step_log = self._context.run_log_store.get_step_log(
+             self._get_step_log_name(map_variable), self._context.run_id
+         )
+
+         attempt_log = datastore.StepAttempt(
+             status=defaults.SUCCESS,
+             start_time=str(datetime.now()),
+             end_time=str(datetime.now()),
+             attempt_number=attempt_number,
+         )
+
+         run_or_branch_log = self._context.run_log_store.get_branch_log(
+             self._get_branch_log_name(map_variable), self._context.run_id
+         )
+         run_or_branch_log.status = defaults.SUCCESS
+         self._context.run_log_store.add_branch_log(
+             run_or_branch_log, self._context.run_id
+         )
+
+         step_log.status = attempt_log.status
+
+         step_log.attempts.append(attempt_log)
+
+         return step_log
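StubNode and SuccessNode record success identically: build a StepAttempt, copy its status onto the step log, and append it. A sketch of just that attempt record, using the same imports and fields the diff adds (assuming the runnable package is installed):

from datetime import datetime

from runnable import datastore, defaults

# The minimal "success attempt" both nodes construct; attempt_number=1 is
# an illustrative value, normally supplied by the executor.
attempt_log = datastore.StepAttempt(
    status=defaults.SUCCESS,
    start_time=str(datetime.now()),
    end_time=str(datetime.now()),
    attempt_number=1,
)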