runnable 0.34.0a3__py3-none-any.whl → 0.36.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- extensions/job_executor/__init__.py +3 -4
- extensions/job_executor/emulate.py +106 -0
- extensions/job_executor/k8s.py +8 -8
- extensions/job_executor/local_container.py +13 -14
- extensions/nodes/__init__.py +0 -0
- extensions/nodes/conditional.py +243 -0
- extensions/nodes/fail.py +72 -0
- extensions/nodes/map.py +350 -0
- extensions/nodes/parallel.py +159 -0
- extensions/nodes/stub.py +89 -0
- extensions/nodes/success.py +72 -0
- extensions/nodes/task.py +92 -0
- extensions/pipeline_executor/__init__.py +24 -26
- extensions/pipeline_executor/argo.py +50 -41
- extensions/pipeline_executor/emulate.py +112 -0
- extensions/pipeline_executor/local.py +4 -4
- extensions/pipeline_executor/local_container.py +19 -79
- extensions/pipeline_executor/mocked.py +4 -4
- extensions/pipeline_executor/retry.py +6 -10
- extensions/tasks/torch.py +1 -1
- runnable/__init__.py +2 -9
- runnable/catalog.py +1 -21
- runnable/cli.py +0 -59
- runnable/context.py +519 -28
- runnable/datastore.py +51 -54
- runnable/defaults.py +12 -34
- runnable/entrypoints.py +82 -440
- runnable/exceptions.py +35 -34
- runnable/executor.py +13 -20
- runnable/names.py +1 -1
- runnable/nodes.py +18 -16
- runnable/parameters.py +2 -2
- runnable/sdk.py +117 -164
- runnable/tasks.py +62 -21
- runnable/utils.py +6 -268
- {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/METADATA +1 -2
- runnable-0.36.0.dist-info/RECORD +74 -0
- {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/entry_points.txt +9 -8
- extensions/nodes/nodes.py +0 -778
- extensions/nodes/torch.py +0 -273
- extensions/nodes/torch_config.py +0 -76
- runnable-0.34.0a3.dist-info/RECORD +0 -67
- {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/WHEEL +0 -0
- {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/licenses/LICENSE +0 -0
runnable/exceptions.py
CHANGED
@@ -6,8 +6,9 @@ class RunLogExistsError(Exception):  # pragma: no cover
     """

     def __init__(self, run_id):
-
-
+        self.run_id = run_id
+        message = f"Run id for {run_id} is already found in the datastore"
+        super().__init__(message)


 class JobLogNotFoundError(Exception):
@@ -18,8 +19,9 @@ class JobLogNotFoundError(Exception):
     """

     def __init__(self, run_id):
-
-
+        self.run_id = run_id
+        message = f"Job for {run_id} is not found in the datastore"
+        super().__init__(message)


 class RunLogNotFoundError(Exception):  # pragma: no cover
@@ -30,8 +32,9 @@ class RunLogNotFoundError(Exception):  # pragma: no cover
     """

     def __init__(self, run_id):
-
-
+        self.run_id = run_id
+        message = f"Run id for {run_id} is not found in the datastore"
+        super().__init__(message)


 class StepLogNotFoundError(Exception):  # pragma: no cover
@@ -41,11 +44,11 @@ class StepLogNotFoundError(Exception):  # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id,
-
-        self.
-
-        )
+    def __init__(self, run_id, step_name):
+        self.run_id = run_id
+        self.step_name = step_name
+        message = f"Step log for {step_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class BranchLogNotFoundError(Exception):  # pragma: no cover
@@ -55,11 +58,11 @@ class BranchLogNotFoundError(Exception):  # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id,
-
-        self.
-
-        )
+    def __init__(self, run_id, branch_name):
+        self.run_id = run_id
+        self.branch_name = branch_name
+        message = f"Branch log for {branch_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class NodeNotFoundError(Exception):  # pragma: no cover
@@ -70,8 +73,9 @@ class NodeNotFoundError(Exception):  # pragma: no cover
     """

     def __init__(self, name):
-
-
+        self.name = name
+        message = f"Node of name {name} is not found the graph"
+        super().__init__(message)


 class BranchNotFoundError(Exception):  # pragma: no cover
@@ -82,8 +86,9 @@ class BranchNotFoundError(Exception):  # pragma: no cover
     """

     def __init__(self, name):
-
-
+        self.name = name
+        message = f"Branch of name {name} is not found the graph"
+        super().__init__(message)


 class NodeMethodCallError(Exception):
@@ -92,32 +97,28 @@ class NodeMethodCallError(Exception):
     """

     def __init__(self, message):
-        super().__init__()
-        self.message = message
+        super().__init__(message)


 class TerminalNodeError(Exception):  # pragma: no cover
     def __init__(self):
-
-
+        message = "Terminal Nodes do not have next node"
+        super().__init__(message)


 class SecretNotFoundError(Exception):  # pragma: no cover
-    """
-    Exception class
-    Args:
-        Exception ([type]): [description]
-    """
-
     def __init__(self, secret_name, secret_setting):
-
-        self.
+        self.secret_name = secret_name
+        self.secret_setting = secret_setting
+        message = f"No secret found by name:{secret_name} in {secret_setting}"
+        super().__init__(message)


 class ExecutionFailedError(Exception):  # pragma: no cover
     def __init__(self, run_id: str):
-
-
+        self.run_id = run_id
+        message = f"Execution failed for run id: {run_id}"
+        super().__init__(message)


 class CommandCallError(Exception):  # pragma: no cover
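For reference, the construction pattern these exceptions move to is shown below as a standalone sketch: the message is handed to Exception.__init__, so str(exc) carries it and no custom .message attribute is needed. The class body mirrors the diff above; the run id is an invented example value.

class RunLogExistsError(Exception):  # mirrors the 0.36.0 definition shown above
    def __init__(self, run_id):
        self.run_id = run_id
        message = f"Run id for {run_id} is already found in the datastore"
        super().__init__(message)


try:
    raise RunLogExistsError("demo-run-001")  # hypothetical run id
except RunLogExistsError as exc:
    print(exc.run_id)  # structured access: "demo-run-001"
    print(str(exc))    # "Run id for demo-run-001 is already found in the datastore"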
runnable/executor.py
CHANGED
@@ -10,7 +10,7 @@ from pydantic import BaseModel, ConfigDict, PrivateAttr
 import runnable.context as context
 from runnable import defaults
 from runnable.datastore import DataCatalog, JobLog, StepLog
-from runnable.defaults import
+from runnable.defaults import MapVariableType

 if TYPE_CHECKING:  # pragma: no cover
     from runnable.graph import Graph
@@ -34,7 +34,9 @@ class BaseExecutor(ABC, BaseModel):
     service_name: str = ""
     service_type: str = "executor"

-
+    # Should have _should_setup_run_log_at_traversal, local, local_container, emulator is true
+    # False for everything else
+    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=True)

     model_config = ConfigDict(extra="forbid")

@@ -86,7 +88,7 @@ class BaseExecutor(ABC, BaseModel):

     @abstractmethod
     def add_task_log_to_catalog(
-        self, name: str, map_variable: Optional[
+        self, name: str, map_variable: Optional[MapVariableType] = None
     ): ...


@@ -153,16 +155,7 @@ class BaseJobExecutor(BaseExecutor):
     """
     ...

-    # @abstractmethod
-    # def scale_up(self, job: BaseTaskType):
-    #     """
-    #     Scale up the job to run on max_nodes
-    #     This has to also call the entry point
-    #     """
-    #     ...

-
-# TODO: Consolidate execute_node, trigger_node_execution, _execute_node
 class BasePipelineExecutor(BaseExecutor):
     service_type: str = "pipeline_executor"
     overrides: dict[str, Any] = {}
@@ -214,7 +207,7 @@ class BasePipelineExecutor(BaseExecutor):
     def _execute_node(
         self,
         node: BaseNode,
-        map_variable:
+        map_variable: MapVariableType = None,
         mock: bool = False,
     ):
         """
@@ -238,7 +231,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_node(self, node: BaseNode, map_variable:
+    def execute_node(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         The entry point for all executors apart from local.
         We have already prepared for node execution.
@@ -253,7 +246,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_from_graph(self, node: BaseNode, map_variable:
+    def execute_from_graph(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This is the entry point to from the graph execution.

@@ -282,7 +275,7 @@ class BasePipelineExecutor(BaseExecutor):

     @abstractmethod
     def _get_status_and_next_node_name(
-        self, current_node: BaseNode, dag: Graph, map_variable:
+        self, current_node: BaseNode, dag: Graph, map_variable: MapVariableType = None
     ) -> tuple[str, str]:
         """
         Given the current node and the graph, returns the name of the next node to execute.
@@ -301,7 +294,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_graph(self, dag: Graph, map_variable:
+    def execute_graph(self, dag: Graph, map_variable: MapVariableType = None):
         """
         The parallelization is controlled by the nodes and not by this function.

@@ -356,7 +349,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def fan_out(self, node: BaseNode, map_variable:
+    def fan_out(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-out the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -379,7 +372,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def fan_in(self, node: BaseNode, map_variable:
+    def fan_in(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-in after the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -402,7 +395,7 @@ class BasePipelineExecutor(BaseExecutor):

     @abstractmethod
     def trigger_node_execution(
-        self, node: BaseNode, map_variable:
+        self, node: BaseNode, map_variable: MapVariableType = None
     ):
         """
         Executor specific way of triggering jobs when runnable does both traversal and execution
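The new _should_setup_run_log_at_traversal flag is a pydantic PrivateAttr that defaults to True and, per the comment in the diff, is meant to stay True only for the local, local_container and emulator executors. Below is a minimal sketch of the mechanism only, using made-up class names rather than runnable's actual executor classes.

from pydantic import BaseModel, PrivateAttr


class SketchBaseExecutor(BaseModel):  # stand-in for BaseExecutor, not the real class
    service_name: str = ""
    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=True)


class SketchRemoteExecutor(SketchBaseExecutor):
    # a remote executor would opt out and let the orchestrator set up the run log
    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=False)


print(SketchBaseExecutor()._should_setup_run_log_at_traversal)    # True
print(SketchRemoteExecutor()._should_setup_run_log_at_traversal)  # False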
runnable/names.py
CHANGED
runnable/nodes.py
CHANGED
@@ -7,7 +7,8 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator
 import runnable.context as context
 from runnable import defaults, exceptions
 from runnable.datastore import StepLog
-from runnable.defaults import
+from runnable.defaults import MapVariableType
+from runnable.graph import Graph

 logger = logging.getLogger(defaults.LOGGER_NAME)

@@ -41,6 +42,7 @@ class BaseNode(ABC, BaseModel):

     @property
     def _context(self):
+        assert isinstance(context.run_context, context.PipelineContext)
         return context.run_context

     model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=False)
@@ -80,7 +82,7 @@ class BaseNode(ABC, BaseModel):

     @classmethod
     def _resolve_map_placeholders(
-        cls, name: str, map_variable:
+        cls, name: str, map_variable: MapVariableType = None
     ) -> str:
         """
         If there is no map step used, then we just return the name as we find it.
@@ -131,7 +133,7 @@ class BaseNode(ABC, BaseModel):

         return name

-    def _get_step_log_name(self, map_variable:
+    def _get_step_log_name(self, map_variable: MapVariableType = None) -> str:
         """
         For every step in the dag, there is a corresponding step log name.
         This method returns the step log name in dot path convention.
@@ -150,7 +152,7 @@ class BaseNode(ABC, BaseModel):
             self.internal_name, map_variable=map_variable
         )

-    def _get_branch_log_name(self, map_variable:
+    def _get_branch_log_name(self, map_variable: MapVariableType = None) -> str:
         """
         For nodes that are internally branches, this method returns the branch log name.
         The branch log name is in dot path convention.
@@ -218,7 +220,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def _get_branch_by_name(self, branch_name: str):
+    def _get_branch_by_name(self, branch_name: str) -> Graph:
         """
         Retrieve a branch by name.

@@ -279,7 +281,7 @@ class BaseNode(ABC, BaseModel):
     def execute(
         self,
         mock=False,
-        map_variable:
+        map_variable: MapVariableType = None,
         attempt_number: int = 1,
     ) -> StepLog:
         """
@@ -299,7 +301,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def execute_as_graph(self, map_variable:
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         """
         This function would be called to set up the execution of the individual
         branches of a composite node.
@@ -314,7 +316,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def fan_out(self, map_variable:
+    def fan_out(self, map_variable: MapVariableType = None):
         """
         This function would be called to set up the execution of the individual
         branches of a composite node.
@@ -330,7 +332,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def fan_in(self, map_variable:
+    def fan_in(self, map_variable: MapVariableType = None):
         """
         This function would be called to tear down the execution of the individual
         branches of a composite node.
@@ -439,17 +441,17 @@ class ExecutableNode(TraversalNode):
             "This is an executable node and does not have branches"
         )

-    def execute_as_graph(self, map_variable:
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a graph"
         )

-    def fan_in(self, map_variable:
+    def fan_in(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a fan in"
         )

-    def fan_out(self, map_variable:
+    def fan_out(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a fan out"
         )
@@ -475,7 +477,7 @@ class CompositeNode(TraversalNode):
     def execute(
         self,
         mock=False,
-        map_variable:
+        map_variable: MapVariableType = None,
         attempt_number: int = 1,
     ) -> StepLog:
         raise exceptions.NodeMethodCallError(
@@ -505,15 +507,15 @@ class TerminalNode(BaseNode):
     def _get_max_attempts(self) -> int:
         return 1

-    def execute_as_graph(self, map_variable:
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         raise exceptions.TerminalNodeError()

-    def fan_in(self, map_variable:
+    def fan_in(self, map_variable: MapVariableType = None):
         raise exceptions.TerminalNodeError()

     def fan_out(
         self,
-        map_variable:
+        map_variable: MapVariableType = None,
     ):
         raise exceptions.TerminalNodeError()

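The docstrings above describe step and branch log names following a dot-path convention, with map placeholders in an internal name resolved when a map variable is present. The sketch below illustrates that idea only; the placeholder token, the dict shape of MapVariableType, and the function body are assumptions for the sketch, not runnable's implementation.

from typing import Dict, Optional

MapVariableType = Optional[Dict[str, str]]  # local stand-in for runnable.defaults.MapVariableType
MAP_PLACEHOLDER = "%"  # hypothetical token marking the map iteration in an internal name


def resolve_map_placeholders(name: str, map_variable: MapVariableType = None) -> str:
    # no map step used: the name is returned as found, as the docstring states
    if not map_variable:
        return name
    for value in map_variable.values():
        name = name.replace(MAP_PLACEHOLDER, str(value), 1)
    return name


print(resolve_map_placeholders("pipeline.map_step.%.task", {"chunk": "42"}))
# -> "pipeline.map_step.42.task"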
runnable/parameters.py
CHANGED
@@ -10,7 +10,7 @@ from typing_extensions import Callable

 from runnable import defaults
 from runnable.datastore import JsonParameter, ObjectParameter
-from runnable.defaults import
+from runnable.defaults import MapVariableType
 from runnable.utils import remove_prefix

 logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -51,7 +51,7 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
 def filter_arguments_for_func(
     func: Callable[..., Any],
     params: Dict[str, Any],
-    map_variable:
+    map_variable: MapVariableType = None,
 ) -> Dict[str, Any]:
     """
     Inspects the function to be called as part of the pipeline to find the arguments of the function.
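filter_arguments_for_func is documented as inspecting the target function and passing it only the arguments it declares. The rough standalone sketch below shows that idea under stated assumptions; it is not the library's implementation, which also handles parameter types and map variables.

import inspect
from typing import Any, Callable, Dict


def filter_kwargs(func: Callable[..., Any], params: Dict[str, Any]) -> Dict[str, Any]:
    # keep only the entries whose names appear in the function signature
    accepted = inspect.signature(func).parameters
    return {name: value for name, value in params.items() if name in accepted}


def train(epochs: int, lr: float = 1e-3):
    return epochs, lr


print(filter_kwargs(train, {"epochs": 5, "lr": 0.01, "unused": "dropped"}))
# -> {'epochs': 5, 'lr': 0.01}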
|