runnable 0.13.0__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- runnable/__init__.py +1 -12
- runnable/catalog.py +29 -5
- runnable/cli.py +268 -215
- runnable/context.py +10 -3
- runnable/datastore.py +212 -53
- runnable/defaults.py +13 -55
- runnable/entrypoints.py +270 -183
- runnable/exceptions.py +28 -2
- runnable/executor.py +133 -86
- runnable/graph.py +37 -13
- runnable/nodes.py +50 -22
- runnable/parameters.py +27 -8
- runnable/pickler.py +1 -1
- runnable/sdk.py +230 -66
- runnable/secrets.py +3 -1
- runnable/tasks.py +99 -41
- runnable/utils.py +59 -39
- {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info}/METADATA +28 -31
- runnable-0.16.0.dist-info/RECORD +23 -0
- {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info}/WHEEL +1 -1
- runnable-0.16.0.dist-info/entry_points.txt +45 -0
- runnable/extensions/__init__.py +0 -0
- runnable/extensions/catalog/__init__.py +0 -21
- runnable/extensions/catalog/file_system/__init__.py +0 -0
- runnable/extensions/catalog/file_system/implementation.py +0 -234
- runnable/extensions/catalog/k8s_pvc/__init__.py +0 -0
- runnable/extensions/catalog/k8s_pvc/implementation.py +0 -16
- runnable/extensions/catalog/k8s_pvc/integration.py +0 -59
- runnable/extensions/executor/__init__.py +0 -649
- runnable/extensions/executor/argo/__init__.py +0 -0
- runnable/extensions/executor/argo/implementation.py +0 -1194
- runnable/extensions/executor/argo/specification.yaml +0 -51
- runnable/extensions/executor/k8s_job/__init__.py +0 -0
- runnable/extensions/executor/k8s_job/implementation_FF.py +0 -259
- runnable/extensions/executor/k8s_job/integration_FF.py +0 -69
- runnable/extensions/executor/local.py +0 -69
- runnable/extensions/executor/local_container/__init__.py +0 -0
- runnable/extensions/executor/local_container/implementation.py +0 -446
- runnable/extensions/executor/mocked/__init__.py +0 -0
- runnable/extensions/executor/mocked/implementation.py +0 -154
- runnable/extensions/executor/retry/__init__.py +0 -0
- runnable/extensions/executor/retry/implementation.py +0 -168
- runnable/extensions/nodes.py +0 -870
- runnable/extensions/run_log_store/__init__.py +0 -0
- runnable/extensions/run_log_store/chunked_file_system/__init__.py +0 -0
- runnable/extensions/run_log_store/chunked_file_system/implementation.py +0 -111
- runnable/extensions/run_log_store/chunked_k8s_pvc/__init__.py +0 -0
- runnable/extensions/run_log_store/chunked_k8s_pvc/implementation.py +0 -21
- runnable/extensions/run_log_store/chunked_k8s_pvc/integration.py +0 -61
- runnable/extensions/run_log_store/db/implementation_FF.py +0 -157
- runnable/extensions/run_log_store/db/integration_FF.py +0 -0
- runnable/extensions/run_log_store/file_system/__init__.py +0 -0
- runnable/extensions/run_log_store/file_system/implementation.py +0 -140
- runnable/extensions/run_log_store/generic_chunked.py +0 -557
- runnable/extensions/run_log_store/k8s_pvc/__init__.py +0 -0
- runnable/extensions/run_log_store/k8s_pvc/implementation.py +0 -21
- runnable/extensions/run_log_store/k8s_pvc/integration.py +0 -56
- runnable/extensions/secrets/__init__.py +0 -0
- runnable/extensions/secrets/dotenv/__init__.py +0 -0
- runnable/extensions/secrets/dotenv/implementation.py +0 -100
- runnable/integration.py +0 -192
- runnable-0.13.0.dist-info/RECORD +0 -63
- runnable-0.13.0.dist-info/entry_points.txt +0 -41
- {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info/licenses}/LICENSE +0 -0
runnable/exceptions.py
CHANGED
@@ -10,6 +10,18 @@ class RunLogExistsError(Exception):  # pragma: no cover
         self.message = f"Run id for {run_id} is already found in the datastore"


+class JobLogNotFoundError(Exception):
+    """
+    Exception class
+    Args:
+        Exception ([type]): [description]
+    """
+
+    def __init__(self, run_id):
+        super().__init__()
+        self.message = f"Job for {run_id} is not found in the datastore"
+
+
 class RunLogNotFoundError(Exception):  # pragma: no cover
     """
     Exception class
@@ -31,7 +43,9 @@ class StepLogNotFoundError(Exception):  # pragma: no cover

     def __init__(self, run_id, name):
         super().__init__()
-        self.message = f"Step log for {name} is not found in the datastore for Run id: {run_id}"
+        self.message = (
+            f"Step log for {name} is not found in the datastore for Run id: {run_id}"
+        )


 class BranchLogNotFoundError(Exception):  # pragma: no cover
@@ -43,7 +57,9 @@ class BranchLogNotFoundError(Exception):  # pragma: no cover

     def __init__(self, run_id, name):
         super().__init__()
-        self.message = f"Branch log for {name} is not found in the datastore for Run id: {run_id}"
+        self.message = (
+            f"Branch log for {name} is not found in the datastore for Run id: {run_id}"
+        )


 class NodeNotFoundError(Exception):  # pragma: no cover
@@ -70,6 +86,16 @@ class BranchNotFoundError(Exception):  # pragma: no cover
         self.message = f"Branch of name {name} is not found the graph"


+class NodeMethodCallError(Exception):
+    """
+    Exception class
+    """
+
+    def __init__(self, message):
+        super().__init__()
+        self.message = message
+
+
 class TerminalNodeError(Exception):  # pragma: no cover
     def __init__(self):
         super().__init__()
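Both new exception types follow the existing convention in this module of carrying their human-readable detail on a `message` attribute. A minimal sketch of how calling code might handle the new `JobLogNotFoundError`; the `get_job_log` accessor and the surrounding store object are assumptions for illustration, not part of this diff:

from runnable import exceptions

def describe_job(run_log_store, run_id: str) -> str:
    try:
        # Hypothetical accessor that raises JobLogNotFoundError when the
        # datastore has no job log for this run id.
        job_log = run_log_store.get_job_log(run_id=run_id)
    except exceptions.JobLogNotFoundError as err:
        # runnable exceptions keep their detail on .message
        return f"missing: {err.message}"
    return f"found job log for run {run_id}: {job_log}"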
runnable/executor.py
CHANGED
@@ -5,17 +5,17 @@ import os
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Any, Dict, List, Optional

-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict, PrivateAttr

 import runnable.context as context
 from runnable import defaults
-from runnable.datastore import DataCatalog, StepLog
+from runnable.datastore import DataCatalog, JobLog, StepLog
 from runnable.defaults import TypeMapVariable
 from runnable.graph import Graph

 if TYPE_CHECKING:  # pragma: no cover
-    from runnable.extensions.nodes import TaskNode
     from runnable.nodes import BaseNode
+    from runnable.tasks import BaseTaskType

 logger = logging.getLogger(defaults.LOGGER_NAME)

@@ -34,11 +34,10 @@ class BaseExecutor(ABC, BaseModel):
     service_name: str = ""
     service_type: str = "executor"

-
-
-
+    _is_local: bool = (
+        False  # This is a flag to indicate whether the executor is local or not.
+    )

-    _context_node = None  # type: BaseNode
     model_config = ConfigDict(extra="forbid")

     @property
@@ -65,32 +64,63 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...

+    # TODO: Make this attempt number
+    @property
+    def step_attempt_number(self) -> int:
+        """
+        The attempt number of the current step.
+        Orchestrators should use this step to submit multiple attempts of the job.
+
+        Returns:
+            int: The attempt number of the current step. Defaults to 1.
+        """
+        return int(os.environ.get(defaults.ATTEMPT_NUMBER, 1))
+
     @abstractmethod
-    def
+    def send_return_code(self, stage="traversal"):
+        """
+        Convenience function used by pipeline to send return code to the caller of the cli
+
+        Raises:
+            Exception: If the pipeline execution failed
         """
-
-
+        ...
+

-
+class BaseJobExecutor(BaseExecutor):
+    service_type: str = "job_executor"

-
-
+    @abstractmethod
+    def submit_job(self, job: BaseTaskType, catalog_settings: Optional[List[str]]):
+        """
+        Local executors should
+        - create the run log
+        - and call an execute_job
+
+        Non local executors should
+        - transpile the job to the platform specific job spec
+        - submit the job to call execute_job
         """
         ...

     @abstractmethod
-    def
+    def add_code_identities(self, job_log: JobLog, **kwargs):
         """
-
+        Add code identities specific to the implementation.
+
+        The Base class has an implementation of adding git code identities.

         Args:
-
-
+            step_log (object): The step log object
+            node (BaseNode): The node we are adding the step log for
         """
         ...

     @abstractmethod
-    def _sync_catalog(
+    def _sync_catalog(
+        self,
+        catalog_settings: Optional[List[str]],
+    ) -> Optional[List[DataCatalog]]:
         """
         1). Identify the catalog settings by over-riding node settings with the global settings.
         2). For stage = get:
@@ -112,6 +142,34 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...

+    @abstractmethod
+    def execute_job(self, job: BaseTaskType, catalog_settings: Optional[List[str]]):
+        """
+        Focusses only on execution of the job.
+        """
+        ...
+
+
+# TODO: Consolidate execute_node, trigger_node_execution, _execute_node
+class BasePipelineExecutor(BaseExecutor):
+    service_type: str = "pipeline_executor"
+    overrides: dict = {}
+
+    _context_node: Optional[BaseNode] = PrivateAttr(default=None)
+
+    @abstractmethod
+    def add_code_identities(self, node: BaseNode, step_log: StepLog, **kwargs):
+        """
+        Add code identities specific to the implementation.
+
+        The Base class has an implementation of adding git code identities.
+
+        Args:
+            step_log (object): The step log object
+            node (BaseNode): The node we are adding the step log for
+        """
+        ...
+
     @abstractmethod
     def get_effective_compute_data_folder(self) -> Optional[str]:
         """
@@ -129,19 +187,39 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...

-    @
-    def
+    @abstractmethod
+    def _sync_catalog(
+        self, stage: str, synced_catalogs=None
+    ) -> Optional[List[DataCatalog]]:
         """
-
-
+        1). Identify the catalog settings by over-riding node settings with the global settings.
+        2). For stage = get:
+            Identify the catalog items that are being asked to get from the catalog
+            And copy them to the local compute data folder
+        3). For stage = put:
+            Identify the catalog items that are being asked to put into the catalog
+            Copy the items from local compute folder to the catalog
+        4). Add the items onto the step log according to the stage
+
+        Args:
+            node (Node): The current node being processed
+            step_log (StepLog): The step log corresponding to that node
+            stage (str): One of get or put
+
+        Raises:
+            Exception: If the stage is not in one of get/put

-        Returns:
-            int: The attempt number of the current step. Defaults to 1.
         """
-
+        ...

     @abstractmethod
-    def _execute_node(
+    def _execute_node(
+        self,
+        node: BaseNode,
+        map_variable: TypeMapVariable = None,
+        mock: bool = False,
+        **kwargs,
+    ):
         """
         This is the entry point when we do the actual execution of the function.

@@ -163,7 +241,9 @@ class BaseExecutor(ABC, BaseModel):
         ...

     @abstractmethod
-    def execute_node(
+    def execute_node(
+        self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
+    ):
         """
         The entry point for all executors apart from local.
         We have already prepared for node execution.
@@ -178,20 +258,9 @@ class BaseExecutor(ABC, BaseModel):
         ...

     @abstractmethod
-    def
-
-
-
-        The Base class has an implementation of adding git code identities.
-
-        Args:
-            step_log (object): The step log object
-            node (BaseNode): The node we are adding the step log for
-        """
-        ...
-
-    @abstractmethod
-    def execute_from_graph(self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs):
+    def execute_from_graph(
+        self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
+    ):
         """
         This is the entry point to from the graph execution.

@@ -219,24 +288,9 @@ class BaseExecutor(ABC, BaseModel):
         ...

     @abstractmethod
-    def
-
-
-
-        Transpilers will NEVER use this method and will NEVER call them.
-        Only interactive executors who need execute_from_graph will ever implement it.
-
-        Args:
-            node (BaseNode): The node to execute
-            map_variable (str, optional): If the node if of a map state, this corresponds to the value of iterable.
-                    Defaults to ''.
-
-        NOTE: We do not raise an exception as this method is not required by many extensions
-        """
-        ...
-
-    @abstractmethod
-    def _get_status_and_next_node_name(self, current_node: BaseNode, dag: Graph, map_variable: TypeMapVariable = None):
+    def _get_status_and_next_node_name(
+        self, current_node: BaseNode, dag: Graph, map_variable: TypeMapVariable = None
+    ) -> tuple[str, str]:
         """
         Given the current node and the graph, returns the name of the next node to execute.

@@ -275,17 +329,7 @@ class BaseExecutor(ABC, BaseModel):
         ...

     @abstractmethod
-    def
-        """
-        Convenience function used by pipeline to send return code to the caller of the cli
-
-        Raises:
-            Exception: If the pipeline execution failed
-        """
-        ...
-
-    @abstractmethod
-    def _resolve_executor_config(self, node: BaseNode):
+    def _resolve_executor_config(self, node: BaseNode) -> Dict[str, Any]:
         """
         The overrides section can contain specific over-rides to an global executor config.
         To avoid too much clutter in the dag definition, we allow the configuration file to have overrides block.
@@ -318,22 +362,6 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...

-    @abstractmethod
-    def execute_job(self, node: TaskNode):
-        """
-        Executor specific way of executing a job (python function or a notebook).
-
-        Interactive executors should execute the job.
-        Transpilers should write the instructions.
-
-        Args:
-            node (BaseNode): The job node to execute
-
-        Raises:
-            NotImplementedError: Executors should choose to extend this functionality or not.
-        """
-        ...
-
     @abstractmethod
     def fan_out(self, node: BaseNode, map_variable: TypeMapVariable = None):
         """
@@ -378,3 +406,22 @@ class BaseExecutor(ABC, BaseModel):

         """
         ...
+
+    @abstractmethod
+    def trigger_node_execution(
+        self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
+    ):
+        """
+        Executor specific way of triggering jobs when runnable does both traversal and execution
+
+        Transpilers will NEVER use this method and will NEVER call them.
+        Only interactive executors who need execute_from_graph will ever implement it.
+
+        Args:
+            node (BaseNode): The node to execute
+            map_variable (str, optional): If the node if of a map state, this corresponds to the value of iterable.
+                    Defaults to ''.
+
+        NOTE: We do not raise an exception as this method is not required by many extensions
+        """
+        ...
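Taken together, these hunks split the old all-purpose BaseExecutor into BaseJobExecutor (submit and execute a single job built from a BaseTaskType) and BasePipelineExecutor (graph traversal), and the old execute_job(self, node: TaskNode) hook is removed. A rough sketch of what a custom job executor could look like against the new abstract surface; only methods visible in this diff are stubbed, the real base class may require more, and all bodies are illustrative assumptions rather than the package's implementation:

from typing import List, Optional

from runnable.datastore import DataCatalog, JobLog
from runnable.executor import BaseJobExecutor
from runnable.tasks import BaseTaskType


class InProcessJobExecutor(BaseJobExecutor):
    """Illustrative local executor that runs the job in the current process."""

    service_name: str = "in-process"
    _is_local: bool = True  # local executors create the run log themselves

    def add_code_identities(self, job_log: JobLog, **kwargs):
        pass  # e.g. attach a git SHA to the job log; omitted in this sketch

    def _sync_catalog(
        self, catalog_settings: Optional[List[str]]
    ) -> Optional[List[DataCatalog]]:
        return None  # no catalog movement in this sketch

    def submit_job(self, job: BaseTaskType, catalog_settings: Optional[List[str]]):
        # Per the docstring above: a local executor creates the run log and
        # immediately executes; a transpiler would emit a platform job spec.
        self.execute_job(job, catalog_settings)

    def execute_job(self, job: BaseTaskType, catalog_settings: Optional[List[str]]):
        ...  # invoke the task here; the task API itself is not part of this diff

    def send_return_code(self, stage="traversal"):
        ...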
runnable/graph.py
CHANGED
@@ -24,7 +24,9 @@ class Graph(BaseModel):
     name: str = ""
     description: Optional[str] = ""
     internal_branch_name: str = Field(default="", exclude=True)
-    nodes: SerializeAsAny[Dict[str, "BaseNode"]] = Field(default_factory=dict, serialization_alias="steps")
+    nodes: SerializeAsAny[Dict[str, "BaseNode"]] = Field(
+        default_factory=dict, serialization_alias="steps"
+    )

     def get_summary(self) -> Dict[str, Any]:
         """
@@ -229,7 +231,9 @@ class Graph(BaseModel):
             return False
         return True

-    def is_cyclic_util(self, node: "BaseNode", visited: Dict[str, bool], recstack: Dict[str, bool]) -> bool:
+    def is_cyclic_util(
+        self, node: "BaseNode", visited: Dict[str, bool], recstack: Dict[str, bool]
+    ) -> bool:
         """
         Recursive utility that determines if a node and neighbors has a cycle. Is used in is_dag method.

@@ -327,7 +331,9 @@ def create_graph(dag_config: Dict[str, Any], internal_branch_name: str = "") ->
         Graph: The created graph object
     """
     description: str = dag_config.get("description", None)
-    start_at: str = cast(str, dag_config.get("start_at"))  # Let the start_at be relative to the graph
+    start_at: str = cast(
+        str, dag_config.get("start_at")
+    )  # Let the start_at be relative to the graph

     graph = Graph(
         start_at=start_at,
@@ -339,7 +345,9 @@ def create_graph(dag_config: Dict[str, Any], internal_branch_name: str = "") ->
     for name, step_config in dag_config.get("steps", {}).items():
         logger.info(f"Adding node {name} with :{step_config}")

-        node = create_node(name, step_config=step_config, internal_branch_name=internal_branch_name)
+        node = create_node(
+            name, step_config=step_config, internal_branch_name=internal_branch_name
+        )
         graph.add_node(node)

     graph.add_terminal_nodes(internal_branch_name=internal_branch_name)
@@ -369,8 +377,12 @@ def create_node(name: str, step_config: dict, internal_branch_name: Optional[str
         internal_name = internal_branch_name + "." + name

     try:
-        node_type = step_config.pop("type")  # Remove the type as it is not used in node creation.
-        node_mgr: BaseNode = driver.DriverManager(namespace="nodes", name=node_type).driver
+        node_type = step_config.pop(
+            "type"
+        )  # Remove the type as it is not used in node creation.
+        node_mgr: BaseNode = driver.DriverManager(
+            namespace="nodes", name=node_type
+        ).driver

         next_node = step_config.pop("next", None)

@@ -386,7 +398,6 @@ def create_node(name: str, step_config: dict, internal_branch_name: Optional[str
         node = node_mgr.parse_from_config(config=invoke_kwds)
         return node
     except KeyError:
-        # type is missing!!
         msg = "The node configuration does not contain the required key 'type'."
         logger.exception(step_config)
         raise Exception(msg)
@@ -424,11 +435,18 @@ def search_node_by_internal_name(dag: Graph, internal_name: str):
     for i in range(len(dot_path)):
         if i % 2:
             # Its odd, so we are in brach name
-            current_branch = current_node._get_branch_by_name(".".join(dot_path[: i + 1]))  # type: ignore
-            logger.debug(f"Finding step for {internal_name} in branch: {current_branch}")
+
+            current_branch = current_node._get_branch_by_name(  # type: ignore
+                ".".join(dot_path[: i + 1])
+            )
+            logger.debug(
+                f"Finding step for {internal_name} in branch: {current_branch}"
+            )
         else:
             # Its even, so we are in Step, we start here!
-            current_node = current_branch.get_node_by_internal_name(".".join(dot_path[: i + 1]))
+            current_node = current_branch.get_node_by_internal_name(
+                ".".join(dot_path[: i + 1])
+            )
             logger.debug(f"Finding {internal_name} in node: {current_node}")

     logger.debug(f"current branch : {current_branch}, current step {current_node}")
@@ -463,12 +481,18 @@ def search_branch_by_internal_name(dag: Graph, internal_name: str):
     for i in range(len(dot_path)):
         if i % 2:
             # Its odd, so we are in brach name
-            current_branch = current_node._get_branch_by_name(".".join(dot_path[: i + 1]))  # type: ignore
-            logger.debug(f"Finding step for {internal_name} in branch: {current_branch}")
+            current_branch = current_node._get_branch_by_name(  # type: ignore
+                ".".join(dot_path[: i + 1])
+            )
+            logger.debug(
+                f"Finding step for {internal_name} in branch: {current_branch}"
+            )

         else:
             # Its even, so we are in Step, we start here!
-            current_node = current_branch.get_node_by_internal_name(".".join(dot_path[: i + 1]))
+            current_node = current_branch.get_node_by_internal_name(
+                ".".join(dot_path[: i + 1])
+            )
             logger.debug(f"Finding {internal_name} in node: {current_node}")

     logger.debug(f"current branch : {current_branch}, current step {current_node}")
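The graph.py hunks are line-wrapping changes plus a dropped comment; behaviour is unchanged. For orientation, create_graph builds the Graph from a plain dictionary and create_node resolves each step's type through the "nodes" entry-point namespace (the driver.DriverManager call in the hunk above). A sketch of the shape it expects, based only on the keys read in this diff (description, start_at, steps, and per step type/next); the step name, command value, and "task" node type are illustrative assumptions:

from runnable.graph import create_graph

dag_config = {
    "description": "illustrative single-step dag",
    "start_at": "prepare",
    "steps": {
        "prepare": {
            "type": "task",                  # popped before node creation
            "command": "my_module.prepare",  # hypothetical dotted-path command
            "next": "success",
        },
    },
}

graph = create_graph(dag_config)  # adds the steps, then the terminal nodes

A step whose configuration omits the type key now surfaces the explicit error shown above: "The node configuration does not contain the required key 'type'."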
runnable/nodes.py
CHANGED
@@ -51,7 +51,9 @@ class BaseNode(ABC, BaseModel):
             raise ValueError("Node names cannot have . or '%' in them")
         return name

-    def _command_friendly_name(self, replace_with=defaults.COMMAND_FRIENDLY_CHARACTER) -> str:
+    def _command_friendly_name(
+        self, replace_with=defaults.COMMAND_FRIENDLY_CHARACTER
+    ) -> str:
         """
         Replace spaces with special character for spaces.
         Spaces in the naming of the node is convenient for the user but causes issues when used programmatically.
@@ -76,7 +78,9 @@ class BaseNode(ABC, BaseModel):
         return command_name.replace(defaults.COMMAND_FRIENDLY_CHARACTER, " ")

     @classmethod
-    def _resolve_map_placeholders(cls, name: str, map_variable: TypeMapVariable = None) -> str:
+    def _resolve_map_placeholders(
+        cls, name: str, map_variable: TypeMapVariable = None
+    ) -> str:
         """
         If there is no map step used, then we just return the name as we find it.

@@ -141,7 +145,9 @@ class BaseNode(ABC, BaseModel):
         Returns:
             str: The dot path name of the step log name
         """
-        return self._resolve_map_placeholders(self.internal_name, map_variable=map_variable)
+        return self._resolve_map_placeholders(
+            self.internal_name, map_variable=map_variable
+        )

     def _get_branch_log_name(self, map_variable: TypeMapVariable = None) -> str:
         """
@@ -158,7 +164,9 @@ class BaseNode(ABC, BaseModel):
         Returns:
             str: The dot path name of the branch log
         """
-        return self._resolve_map_placeholders(self.internal_branch_name, map_variable=map_variable)
+        return self._resolve_map_placeholders(
+            self.internal_branch_name, map_variable=map_variable
+        )

     def __str__(self) -> str:  # pragma: no cover
         """
@@ -180,7 +188,6 @@ class BaseNode(ABC, BaseModel):
             str: The on_failure node defined by the dag or ''
         This is a base implementation which the BaseNode does not satisfy
         """
-        ...

     @abstractmethod
     def _get_next_node(self) -> str:
@@ -190,7 +197,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             str: The node name, relative to the dag, as defined by the config
         """
-        ...

     @abstractmethod
     def _is_terminal_node(self) -> bool:
@@ -200,7 +206,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             bool: True or False of whether there is next node.
         """
-        ...

     @abstractmethod
     def _get_catalog_settings(self) -> Dict[str, Any]:
@@ -210,7 +215,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             dict: catalog settings defined as per the node or None
         """
-        ...

     @abstractmethod
     def _get_branch_by_name(self, branch_name: str):
@@ -225,7 +229,6 @@ class BaseNode(ABC, BaseModel):
         Raises:
             Exception: [description]
         """
-        ...

     def _get_neighbors(self) -> List[str]:
         """
@@ -261,7 +264,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             dict: The executor config, if defined or an empty dict
         """
-        ...

     @abstractmethod
     def _get_max_attempts(self) -> int:
@@ -271,7 +273,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             int: The number of maximum retries as defined by the config or 1.
         """
-        ...

     @abstractmethod
     def execute(
@@ -296,7 +297,6 @@ class BaseNode(ABC, BaseModel):
         Raises:
             NotImplementedError: Base class, hence not implemented.
         """
-        ...

     @abstractmethod
     def execute_as_graph(self, map_variable: TypeMapVariable = None, **kwargs):
@@ -312,7 +312,6 @@ class BaseNode(ABC, BaseModel):
         Raises:
             NotImplementedError: Base class, hence not implemented.
         """
-        ...

     @abstractmethod
     def fan_out(self, map_variable: TypeMapVariable = None, **kwargs):
@@ -329,7 +328,6 @@ class BaseNode(ABC, BaseModel):
         Raises:
             Exception: If the node is not a composite node.
         """
-        ...

     @abstractmethod
     def fan_in(self, map_variable: TypeMapVariable = None, **kwargs):
@@ -346,7 +344,6 @@ class BaseNode(ABC, BaseModel):
         Raises:
             Exception: If the node is not a composite node.
         """
-        ...

     @classmethod
     @abstractmethod
@@ -360,7 +357,6 @@ class BaseNode(ABC, BaseModel):
         Returns:
             BaseNode: The corresponding node.
         """
-        ...

     @abstractmethod
     def get_summary(self) -> Dict[str, Any]:
@@ -439,16 +435,34 @@ class ExecutableNode(TraversalNode):
         return self.max_attempts

     def _get_branch_by_name(self, branch_name: str):
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have branches"
+        )

     def execute_as_graph(self, map_variable: TypeMapVariable = None, **kwargs):
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a graph"
+        )

     def fan_in(self, map_variable: TypeMapVariable = None, **kwargs):
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a fan in"
+        )

     def fan_out(self, map_variable: TypeMapVariable = None, **kwargs):
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a fan out"
+        )
+
+    def prepare_for_job_execution(self):
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a prepare_for_job_execution"
+        )
+
+    def tear_down_after_job_execution(self):
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a tear_down_after_job_execution",
+        )


 class CompositeNode(TraversalNode):
@@ -459,7 +473,9 @@ class CompositeNode(TraversalNode):
         Returns:
             dict: catalog settings defined as per the node or None
         """
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is a composite node and does not have a catalog settings"
+        )

     def _get_max_attempts(self) -> int:
         raise Exception("This is a composite node and does not have a max_attempts")
@@ -471,7 +487,19 @@ class CompositeNode(TraversalNode):
         attempt_number: int = 1,
         **kwargs,
     ) -> StepLog:
-        raise
+        raise exceptions.NodeMethodCallError(
+            "This is a composite node and does not have an execute function"
+        )
+
+    def prepare_for_job_execution(self):
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a prepare_for_job_execution"
+        )
+
+    def tear_down_after_job_execution(self):
+        raise exceptions.NodeMethodCallError(
+            "This is an executable node and does not have a tear_down_after_job_execution"
+        )


 class TerminalNode(BaseNode):
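The behavioural change in nodes.py is that calling a method that does not apply to a node's category (branches on an executable node, execute on a composite node, and the new job-execution hooks) now raises the typed exceptions.NodeMethodCallError with a descriptive message instead of a bare raise. A small sketch of how calling code might rely on that; node stands for any concrete BaseNode subclass, and the helper itself is illustrative:

from runnable import exceptions

def branch_or_none(node, branch_name: str):
    # Composite nodes expose branches; executable nodes raise
    # NodeMethodCallError, which is translated to None here.
    try:
        return node._get_branch_by_name(branch_name)
    except exceptions.NodeMethodCallError as err:
        # e.g. "This is an executable node and does not have branches"
        print(f"not a composite node: {err.message}")
        return None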
|