runnable 0.2.0-py3-none-any.whl → 0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- runnable/__init__.py +12 -1
- runnable/catalog.py +2 -2
- runnable/cli.py +5 -5
- runnable/datastore.py +3 -2
- runnable/defaults.py +21 -18
- runnable/entrypoints.py +41 -77
- runnable/executor.py +6 -16
- runnable/extensions/catalog/file_system/implementation.py +2 -1
- runnable/extensions/executor/__init__.py +20 -9
- runnable/extensions/executor/argo/implementation.py +6 -5
- runnable/extensions/executor/argo/specification.yaml +1 -1
- runnable/extensions/executor/k8s_job/implementation_FF.py +4 -4
- runnable/extensions/executor/local/implementation.py +1 -0
- runnable/extensions/executor/local_container/implementation.py +4 -10
- runnable/extensions/executor/mocked/implementation.py +2 -33
- runnable/extensions/nodes.py +40 -60
- runnable/integration.py +2 -2
- runnable/interaction.py +9 -4
- runnable/nodes.py +19 -7
- runnable/parameters.py +1 -1
- runnable/sdk.py +181 -59
- runnable/tasks.py +124 -121
- runnable/utils.py +11 -11
- {runnable-0.2.0.dist-info → runnable-0.4.0.dist-info}/METADATA +53 -53
- {runnable-0.2.0.dist-info → runnable-0.4.0.dist-info}/RECORD +28 -28
- {runnable-0.2.0.dist-info → runnable-0.4.0.dist-info}/WHEEL +1 -1
- {runnable-0.2.0.dist-info → runnable-0.4.0.dist-info}/LICENSE +0 -0
- {runnable-0.2.0.dist-info → runnable-0.4.0.dist-info}/entry_points.txt +0 -0
runnable/extensions/executor/argo/implementation.py
CHANGED
@@ -292,7 +292,7 @@ class ContainerTemplate(BaseModel):
 
 class DagTemplate(BaseModel):
     # These are used for parallel, map nodes dag definition
-    name: str = "
+    name: str = "runnable-dag"
     tasks: List[DagTaskTemplate] = Field(default=[], exclude=True)
     inputs: Optional[List[Parameter]] = Field(default=None, serialization_alias="inputs")
     parallelism: Optional[int] = None
@@ -561,7 +561,7 @@ def get_renderer(node):
 
 
 class MetaData(BaseModel):
-    generate_name: str = Field(default="
+    generate_name: str = Field(default="runnable-dag-", serialization_alias="generateName")
     annotations: Optional[Dict[str, str]] = Field(default_factory=dict)
     labels: Optional[Dict[str, str]] = Field(default_factory=dict)
     namespace: Optional[str] = Field(default=None)
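Aside: `serialization_alias` in these models only takes effect when dumping with `by_alias=True`; that is how snake_case fields become Argo-style keys such as `generateName`. A minimal, self-contained sketch with a stand-in model (not the package's class):

    from typing import Optional
    from pydantic import BaseModel, Field

    # Stand-in for the MetaData model above; illustrative only.
    class MetaData(BaseModel):
        generate_name: str = Field(default="runnable-dag-", serialization_alias="generateName")
        namespace: Optional[str] = Field(default=None)

    # by_alias=True is what maps generate_name to the Argo-style key "generateName".
    print(MetaData().model_dump(by_alias=True, exclude_none=True))
    # -> {'generateName': 'runnable-dag-'}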
@@ -569,7 +569,7 @@ class MetaData(BaseModel):
 
 class Spec(BaseModel):
     active_deadline_seconds: int = Field(serialization_alias="activeDeadlineSeconds")
-    entrypoint: str = Field(default="
+    entrypoint: str = Field(default="runnable-dag")
     node_selector: Optional[Dict[str, str]] = Field(default_factory=dict, serialization_alias="nodeSelector")
     tolerations: Optional[List[Toleration]] = Field(default=None, serialization_alias="tolerations")
     parallelism: Optional[int] = Field(default=None, serialization_alias="parallelism")
@@ -665,6 +665,7 @@ class Override(BaseModel):
 
 class ArgoExecutor(GenericExecutor):
     service_name: str = "argo"
+    _local: bool = False
 
     model_config = ConfigDict(extra="forbid")
 
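Aside on the `_local` flag added here (and to the other executors below): in pydantic v2, an underscore-prefixed class attribute becomes a private attribute, so it is not validated as a field and never appears in serialized output. An illustrative sketch, not the package's code:

    from pydantic import BaseModel

    # Stand-in executor; illustrative only.
    class SomeExecutor(BaseModel):
        service_name: str = "argo"
        _local: bool = False  # leading underscore -> pydantic private attribute

    e = SomeExecutor()
    print(e._local)        # False; usable internally
    print(e.model_dump())  # {'service_name': 'argo'} -- _local never serializes
    print("_local" in SomeExecutor.model_fields)  # False; not a validated field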
@@ -674,7 +675,7 @@ class ArgoExecutor(GenericExecutor):
     output_file: str = "argo-pipeline.yaml"
 
     # Metadata related fields
-    name: str = Field(default="
+    name: str = Field(default="runnable-dag-", description="Used as an identifier for the workflow")
     annotations: Dict[str, str] = Field(default_factory=dict)
     labels: Dict[str, str] = Field(default_factory=dict)
 
@@ -994,7 +995,7 @@ class ArgoExecutor(GenericExecutor):
         return DagTaskTemplate(name=f"{clean_name}-fan-in", template=f"{clean_name}-fan-in")
 
     def _gather_task_templates_of_dag(
-        self, dag: Graph, dag_name="
+        self, dag: Graph, dag_name="runnable-dag", list_of_iter_values: Optional[List] = None
     ):
         current_node = dag.start_at
         previous_node = None
runnable/extensions/executor/local_container/implementation.py
CHANGED
@@ -4,10 +4,10 @@
 
 # from pydantic import BaseModel
 
-# from
-# from
-# from
-# from
+# from runnable import defaults, integration, utils
+# from runnable.executor import BaseExecutor
+# from runnable.graph import Graph
+# from runnable.nodes import BaseNode
 
 # logger = logging.getLogger(defaults.NAME)
 
@@ -55,6 +55,8 @@ class LocalContainerExecutor(GenericExecutor):
     run_in_local: bool = False
     environment: Dict[str, str] = Field(default_factory=dict)
 
+    _local: bool = False
+
     _container_log_location = "/tmp/run_logs/"
     _container_catalog_location = "/tmp/catalog/"
     _container_secrets_location = "/tmp/dotenv"
@@ -131,7 +133,7 @@ class LocalContainerExecutor(GenericExecutor):
 
 
         If the config has "run_in_local: True", we compute it on local system instead of container.
-        In local container execution, we just spin the container to execute
+        In local container execution, we just spin the container to execute runnable execute_single_node.
 
         Args:
             node (BaseNode): The node we are currently executing
@@ -198,6 +200,7 @@ class LocalContainerExecutor(GenericExecutor):
 
         try:
             logger.info(f"Running the command {command}")
+            print(command)
             # Overrides global config with local
             executor_config = self._resolve_executor_config(node)
 
@@ -256,15 +259,6 @@ class LocalContainerComputeFileSystemRunLogstore(BaseIntegration):
     service_type = "run_log_store"  # One of secret, catalog, datastore
     service_provider = "file-system"  # The actual implementation of the service
 
-    def validate(self, **kwargs):
-        if self.executor._is_parallel_execution():  # pragma: no branch
-            msg = (
-                "Run log generated by file-system run log store are not thread safe. "
-                "Inconsistent results are possible because of race conditions to write to the same file.\n"
-                "Consider using partitioned run log store like database for consistent results."
-            )
-            logger.warning(msg)
-
     def configure_for_traversal(self, **kwargs):
         from runnable.extensions.run_log_store.file_system.implementation import FileSystemRunLogstore
 
runnable/extensions/executor/mocked/implementation.py
CHANGED
@@ -8,7 +8,6 @@ from runnable import context, defaults
 from runnable.defaults import TypeMapVariable
 from runnable.extensions.executor import GenericExecutor
 from runnable.extensions.nodes import TaskNode
-from runnable.integration import BaseIntegration
 from runnable.nodes import BaseNode
 from runnable.tasks import BaseTaskType
 
@@ -25,8 +24,7 @@ def create_executable(params: Dict[str, Any], model: Type[BaseTaskType], node_na
 
 class MockedExecutor(GenericExecutor):
     service_name: str = "mocked"
-
-    enable_parallel: bool = defaults.ENABLE_PARALLEL
+    _local_executor: bool = True
 
     patches: Dict[str, Any] = Field(default_factory=dict)
 
@@ -119,6 +117,7 @@ class MockedExecutor(GenericExecutor):
         self.prepare_for_node_execution()
         self.execute_node(node=node, map_variable=map_variable, **kwargs)
 
+    # TODO: This needs to go away
     def _is_step_eligible_for_rerun(self, node: BaseNode, map_variable: TypeMapVariable = None):
         """
         In case of a re-run, this method checks to see if the previous run step status to determine if a re-run is
@@ -188,33 +187,3 @@ class MockedExecutor(GenericExecutor):
             map_variable (dict[str, str], optional): _description_. Defaults to None.
         """
         self._execute_node(node=node, map_variable=map_variable, **kwargs)
-
-
-class LocalContainerComputeFileSystemRunLogstore(BaseIntegration):
-    """
-    Integration between local container and file system run log store
-    """
-
-    executor_type = "local-container"
-    service_type = "run_log_store"  # One of secret, catalog, datastore
-    service_provider = "file-system"  # The actual implementation of the service
-
-    def validate(self, **kwargs):
-        if self.executor._is_parallel_execution():  # pragma: no branch
-            msg = "Mocked executor does not support parallel execution. "
-            logger.warning(msg)
-
-
-class LocalContainerComputeChunkedFSRunLogstore(BaseIntegration):
-    """
-    Integration between local container and file system run log store
-    """
-
-    executor_type = "local-container"
-    service_type = "run_log_store"  # One of secret, catalog, datastore
-    service_provider = "chunked-fs"  # The actual implementation of the service
-
-    def validate(self, **kwargs):
-        if self.executor._is_parallel_execution():  # pragma: no branch
-            msg = "Mocked executor does not support parallel execution. "
-            logger.warning(msg)
runnable/extensions/nodes.py
CHANGED
@@ -1,10 +1,9 @@
-import
+import copy
 import logging
-import multiprocessing
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import datetime
-from typing import Any, Dict, cast
+from typing import Any, Dict, Optional, cast
 
 from pydantic import ConfigDict, Field, ValidationInfo, field_serializer, field_validator
 from typing_extensions import Annotated
@@ -44,9 +43,15 @@ class TaskNode(ExecutableNode):
         executable = create_task(task_config)
         return cls(executable=executable, **node_config, **task_config)
 
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         """
-        All that we do in
+        All that we do in runnable is to come to this point where we actually execute the command.
 
         Args:
             executor (_type_): The executor class
@@ -62,9 +67,11 @@ class TaskNode(ExecutableNode):
         try:
             attempt_log.start_time = str(datetime.now())
             attempt_log.status = defaults.SUCCESS
+            attempt_log.input_parameters = copy.deepcopy(params)
             if not mock:
                 # Do not run if we are mocking the execution, could be useful for caching and dry runs
-                self.executable.execute_command(map_variable=map_variable)
+                output_parameters = self.executable.execute_command(map_variable=map_variable, params=params)
+                attempt_log.output_parameters = output_parameters
         except Exception as _e:  # pylint: disable=W0703
             logger.exception("Task failed")
             attempt_log.status = defaults.FAIL
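The new parameter flow is easier to see in isolation. A minimal sketch of the capture-run-capture pattern this hunk introduces, using hypothetical stand-ins for `StepAttempt` and the task callable (not the package's actual classes):

    import copy
    from typing import Any, Callable, Dict, Optional

    # Hypothetical stand-in for runnable's StepAttempt; field names mirror the hunk above.
    class StepAttempt:
        def __init__(self) -> None:
            self.status = "FAIL"
            self.input_parameters: Optional[Dict[str, Any]] = None
            self.output_parameters: Optional[Dict[str, Any]] = None

    def execute(
        task: Callable[[Dict[str, Any]], Dict[str, Any]],
        params: Optional[Dict[str, Any]] = None,
        mock: bool = False,
    ) -> StepAttempt:
        attempt = StepAttempt()
        attempt.input_parameters = copy.deepcopy(params)  # snapshot inputs before the task can mutate them
        if not mock:
            attempt.output_parameters = task(params or {})  # the task now returns updated parameters
        attempt.status = "SUCCESS"
        return attempt

    log = execute(lambda p: {**p, "answer": 42}, params={"x": 1})
    print(log.input_parameters, log.output_parameters)  # {'x': 1} {'x': 1, 'answer': 42}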
@@ -88,7 +95,13 @@ class FailNode(TerminalNode):
     def parse_from_config(cls, config: Dict[str, Any]) -> "FailNode":
         return cast("FailNode", super().parse_from_config(config))
 
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         """
         Execute the failure node.
         Set the run or branch log status to failure.
@@ -105,6 +118,7 @@ class FailNode(TerminalNode):
         try:
             attempt_log.start_time = str(datetime.now())
             attempt_log.status = defaults.SUCCESS
+            attempt_log.input_parameters = params
             # could be a branch or run log
             run_or_branch_log = self._context.run_log_store.get_branch_log(
                 self._get_branch_log_name(map_variable), self._context.run_id
@@ -133,7 +147,13 @@ class SuccessNode(TerminalNode):
     def parse_from_config(cls, config: Dict[str, Any]) -> "SuccessNode":
         return cast("SuccessNode", super().parse_from_config(config))
 
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         """
         Execute the success node.
         Set the run or branch log status to success.
@@ -150,6 +170,7 @@ class SuccessNode(TerminalNode):
         try:
             attempt_log.start_time = str(datetime.now())
             attempt_log.status = defaults.SUCCESS
+            attempt_log.input_parameters = params
             # could be a branch or run log
             run_or_branch_log = self._context.run_log_store.get_branch_log(
                 self._get_branch_log_name(map_variable), self._context.run_id
@@ -257,35 +278,11 @@ class ParallelNode(CompositeNode):
             executor (Executor): The Executor as per the use config
             **kwargs: Optional kwargs passed around
         """
-        from runnable import entrypoints
 
         self.fan_out(map_variable=map_variable, **kwargs)
 
-
-
-        # A better way is to actually submit the job to some process scheduler which does resource management
-        for internal_branch_name, branch in self.branches.items():
-            if self._context.executor._is_parallel_execution():
-                # Trigger parallel jobs
-                action = entrypoints.execute_single_brach
-                kwargs = {
-                    "configuration_file": self._context.configuration_file,
-                    "pipeline_file": self._context.pipeline_file,
-                    "branch_name": internal_branch_name.replace(" ", defaults.COMMAND_FRIENDLY_CHARACTER),
-                    "run_id": self._context.run_id,
-                    "map_variable": json.dumps(map_variable),
-                    "tag": self._context.tag,
-                }
-                process = multiprocessing.Process(target=action, kwargs=kwargs)
-                jobs.append(process)
-                process.start()
-
-            else:
-                # If parallel is not enabled, execute them sequentially
-                self._context.executor.execute_graph(branch, map_variable=map_variable, **kwargs)
-
-        for job in jobs:
-            job.join()  # Find status of the branches
+        for _, branch in self.branches.items():
+            self._context.executor.execute_graph(branch, map_variable=map_variable, **kwargs)
 
         self.fan_in(map_variable=map_variable, **kwargs)
 
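Net effect of this hunk (and the matching MapNode change below): composite nodes no longer fork multiprocessing workers themselves; every branch now runs sequentially through executor.execute_graph, which presumably leaves real parallelism to executor implementations such as Argo.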
@@ -418,7 +415,6 @@ class MapNode(CompositeNode):
             map_variable (dict): The map variables the graph belongs to
             **kwargs: Optional kwargs passed around
         """
-        from runnable import entrypoints
 
         iterate_on = None
         try:
@@ -433,34 +429,11 @@ class MapNode(CompositeNode):
 
         self.fan_out(map_variable=map_variable, **kwargs)
 
-        jobs = []
-        # Given that we can have nesting and complex graphs, controlling the number of processess is hard.
-        # A better way is to actually submit the job to some process scheduler which does resource management
         for iter_variable in iterate_on:
             effective_map_variable = map_variable or OrderedDict()
             effective_map_variable[self.iterate_as] = iter_variable
 
-
-            # Trigger parallel jobs
-            action = entrypoints.execute_single_brach
-            kwargs = {
-                "configuration_file": self._context.configuration_file,
-                "pipeline_file": self._context.pipeline_file,
-                "branch_name": self.branch.internal_branch_name.replace(" ", defaults.COMMAND_FRIENDLY_CHARACTER),
-                "run_id": self._context.run_id,
-                "map_variable": json.dumps(effective_map_variable),
-                "tag": self._context.tag,
-            }
-            process = multiprocessing.Process(target=action, kwargs=kwargs)
-            jobs.append(process)
-            process.start()
-
-        else:
-            # If parallel is not enabled, execute them sequentially
-            self._context.executor.execute_graph(self.branch, map_variable=effective_map_variable, **kwargs)
-
-        for job in jobs:
-            job.join()
+            self._context.executor.execute_graph(self.branch, map_variable=effective_map_variable, **kwargs)
 
         self.fan_in(map_variable=map_variable, **kwargs)
 
@@ -652,7 +625,13 @@ class StubNode(ExecutableNode):
     def parse_from_config(cls, config: Dict[str, Any]) -> "StubNode":
         return cls(**config)
 
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         """
         Do Nothing node.
         We just send an success attempt log back to the caller
@@ -666,6 +645,7 @@ class StubNode(ExecutableNode):
             [type]: [description]
         """
         attempt_log = self._context.run_log_store.create_attempt_log()
+        attempt_log.input_parameters = params
 
         attempt_log.start_time = str(datetime.now())
         attempt_log.status = defaults.SUCCESS  # This is a dummy node and always will be success
runnable/integration.py
CHANGED
@@ -84,7 +84,7 @@ def get_integration_handler(executor: "BaseExecutor", service: object) -> BaseIn
             logger.info(f"Identified an integration pattern {kls.obj}")
             integrations.append(kls.obj)
 
-    # Get all the implementations defined by the
+    # Get all the implementations defined by the runnable package
     for kls in BaseIntegration.__subclasses__():
         # Match the exact service type
         if kls.service_type == service_type and kls.service_provider == service_name:
@@ -95,7 +95,7 @@ def get_integration_handler(executor: "BaseExecutor", service: object) -> BaseIn
     if len(integrations) > 1:
         msg = (
             f"Multiple integrations between {executor.service_name} and {service_name} of type {service_type} found. "
-            "If you defined an integration pattern, please ensure it is specific and does not conflict with
+            "If you defined an integration pattern, please ensure it is specific and does not conflict with runnable "
             " implementations."
         )
         logger.exception(msg)
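For orientation, the lookup above discovers candidates by walking `BaseIntegration.__subclasses__()` at runtime, which is why user-defined integration patterns can shadow the packaged ones. An illustrative sketch with stand-in classes (not the package's actual hierarchy):

    # Illustrative only -- stand-in classes, not runnable's BaseIntegration.
    class BaseIntegration:
        service_type = ""
        service_provider = ""

    class LocalContainerFileSystem(BaseIntegration):
        service_type = "run_log_store"
        service_provider = "file-system"

    # The handler resolution walks direct subclasses at runtime:
    matches = [
        kls for kls in BaseIntegration.__subclasses__()
        if kls.service_type == "run_log_store" and kls.service_provider == "file-system"
    ]
    print(matches)  # [<class '__main__.LocalContainerFileSystem'>]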
runnable/interaction.py
CHANGED
@@ -58,6 +58,11 @@ def track_this(step: int = 0, **kwargs):
         os.environ[prefix + key + f"{defaults.STEP_INDICATOR}{step}"] = json.dumps(value)
 
 
+# TODO: Do we need the API for parameters?
+# If we still want them, what takes precedence? API or returns?
+# Once we decide that, collect the parameters and update them in tasks
+
+
 @check_context
 def set_parameter(**kwargs) -> None:
     """
@@ -279,7 +284,7 @@ def get_run_id() -> str:
     """
     Returns the run_id of the current run.
 
-    You can also access this from the environment variable `
+    You can also access this from the environment variable `runnable_RUN_ID`.
     """
     return context.run_context.run_id
 
@@ -321,14 +326,14 @@ def get_experiment_tracker_context() -> ContextManager:
 
 def start_interactive_session(run_id: str = "", config_file: str = "", tag: str = "", parameters_file: str = ""):
     """
-    During interactive python coding, either via notebooks or ipython, you can start a
+    During interactive python coding, either via notebooks or ipython, you can start a runnable session by calling
     this function. The executor would always be local executor as its interactive.
 
     If this was called during a pipeline/function/notebook execution, it will be ignored.
 
     Args:
         run_id (str, optional): The run id to use. Defaults to "" and would be created if not provided.
-        config_file (str, optional): The configuration file to use. Defaults to "" and
+        config_file (str, optional): The configuration file to use. Defaults to "" and runnable defaults.
         tag (str, optional): The tag to attach to the run. Defaults to "".
         parameters_file (str, optional): The parameters file to use. Defaults to "".
     """
@@ -350,7 +355,7 @@ def start_interactive_session(run_id: str = "", config_file: str = "", tag: str
 
     executor = context.run_context.executor
 
-    utils.
+    utils.set_runnable_environment_variables(run_id=run_id, configuration_file=config_file, tag=tag)
 
     context.run_context.execution_plan = defaults.EXECUTION_PLAN.INTERACTIVE.value
     executor.prepare_for_graph_execution()
runnable/nodes.py
CHANGED
@@ -64,7 +64,7 @@ class BaseNode(ABC, BaseModel):
     @classmethod
     def _get_internal_name_from_command_name(cls, command_name: str) -> str:
         """
-        Replace
+        Replace runnable specific character (%) with whitespace.
         The opposite of _command_friendly_name.
 
         Args:
@@ -274,7 +274,13 @@ class BaseNode(ABC, BaseModel):
         ...
 
     @abstractmethod
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         """
         The actual function that does the execution of the command in the config.
 
@@ -282,7 +288,7 @@ class BaseNode(ABC, BaseModel):
         composite nodes.
 
         Args:
-            executor (
+            executor (runnable.executor.BaseExecutor): The executor class
             mock (bool, optional): Don't run, just pretend. Defaults to False.
             map_variable (str, optional): The value of the map iteration variable, if part of a map node.
                 Defaults to ''.
@@ -301,7 +307,7 @@ class BaseNode(ABC, BaseModel):
         Function should only be implemented for composite nodes like dag, map, parallel.
 
         Args:
-            executor (
+            executor (runnable.executor.BaseExecutor): The executor.
 
         Raises:
             NotImplementedError: Base class, hence not implemented.
@@ -317,7 +323,7 @@ class BaseNode(ABC, BaseModel):
         Function should only be implemented for composite nodes like dag, map, parallel.
 
         Args:
-            executor (
+            executor (runnable.executor.BaseExecutor): The executor.
             map_variable (str, optional): The value of the map iteration variable, if part of a map node.
 
         Raises:
@@ -334,7 +340,7 @@ class BaseNode(ABC, BaseModel):
         Function should only be implemented for composite nodes like dag, map, parallel.
 
         Args:
-            executor (
+            executor (runnable.executor.BaseExecutor): The executor.
             map_variable (str, optional): The value of the map iteration variable, if part of a map node.
 
         Raises:
@@ -449,7 +455,13 @@ class CompositeNode(TraversalNode):
     def _get_max_attempts(self) -> int:
         raise Exception("This is a composite node and does not have a max_attempts")
 
-    def execute(
+    def execute(
+        self,
+        mock=False,
+        params: Optional[Dict[str, Any]] = None,
+        map_variable: TypeMapVariable = None,
+        **kwargs,
+    ) -> StepAttempt:
         raise Exception("This is a composite node and does not have an execute function")
 
 
runnable/parameters.py
CHANGED
@@ -16,7 +16,7 @@ logger = logging.getLogger(defaults.LOGGER_NAME)
 
 def get_user_set_parameters(remove: bool = False) -> Dict[str, Any]:
     """
-    Scans the environment variables for any user returned parameters that have a prefix
+    Scans the environment variables for any user returned parameters that have a prefix runnable_PRM_.
 
     This function does not deal with any type conversion of the parameters.
     It just deserializes the parameters and returns them as a dictionary.