runnable 0.35.0-py3-none-any.whl → 0.36.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. extensions/job_executor/__init__.py +3 -4
  2. extensions/job_executor/emulate.py +106 -0
  3. extensions/job_executor/k8s.py +8 -8
  4. extensions/job_executor/local_container.py +13 -14
  5. extensions/nodes/__init__.py +0 -0
  6. extensions/nodes/conditional.py +7 -5
  7. extensions/nodes/fail.py +72 -0
  8. extensions/nodes/map.py +350 -0
  9. extensions/nodes/parallel.py +159 -0
  10. extensions/nodes/stub.py +89 -0
  11. extensions/nodes/success.py +72 -0
  12. extensions/nodes/task.py +92 -0
  13. extensions/pipeline_executor/__init__.py +24 -26
  14. extensions/pipeline_executor/argo.py +18 -15
  15. extensions/pipeline_executor/emulate.py +112 -0
  16. extensions/pipeline_executor/local.py +4 -4
  17. extensions/pipeline_executor/local_container.py +19 -79
  18. extensions/pipeline_executor/mocked.py +4 -4
  19. extensions/pipeline_executor/retry.py +6 -10
  20. extensions/tasks/torch.py +1 -1
  21. runnable/__init__.py +0 -8
  22. runnable/catalog.py +1 -21
  23. runnable/cli.py +0 -59
  24. runnable/context.py +519 -28
  25. runnable/datastore.py +51 -54
  26. runnable/defaults.py +12 -34
  27. runnable/entrypoints.py +82 -440
  28. runnable/exceptions.py +35 -34
  29. runnable/executor.py +13 -20
  30. runnable/names.py +1 -1
  31. runnable/nodes.py +16 -15
  32. runnable/parameters.py +2 -2
  33. runnable/sdk.py +66 -163
  34. runnable/tasks.py +62 -21
  35. runnable/utils.py +6 -268
  36. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/METADATA +1 -1
  37. runnable-0.36.0.dist-info/RECORD +74 -0
  38. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/entry_points.txt +8 -7
  39. extensions/nodes/nodes.py +0 -778
  40. runnable-0.35.0.dist-info/RECORD +0 -66
  41. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/WHEEL +0 -0
  42. {runnable-0.35.0.dist-info → runnable-0.36.0.dist-info}/licenses/LICENSE +0 -0
runnable/exceptions.py CHANGED
@@ -6,8 +6,9 @@ class RunLogExistsError(Exception): # pragma: no cover
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Run id for {run_id} is already found in the datastore"
+        self.run_id = run_id
+        message = f"Run id for {run_id} is already found in the datastore"
+        super().__init__(message)


 class JobLogNotFoundError(Exception):
@@ -18,8 +19,9 @@ class JobLogNotFoundError(Exception):
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Job for {run_id} is not found in the datastore"
+        self.run_id = run_id
+        message = f"Job for {run_id} is not found in the datastore"
+        super().__init__(message)


 class RunLogNotFoundError(Exception): # pragma: no cover
@@ -30,8 +32,9 @@ class RunLogNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Run id for {run_id} is not found in the datastore"
+        self.run_id = run_id
+        message = f"Run id for {run_id} is not found in the datastore"
+        super().__init__(message)


 class StepLogNotFoundError(Exception): # pragma: no cover
@@ -41,11 +44,11 @@ class StepLogNotFoundError(Exception): # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id, name):
-        super().__init__()
-        self.message = (
-            f"Step log for {name} is not found in the datastore for Run id: {run_id}"
-        )
+    def __init__(self, run_id, step_name):
+        self.run_id = run_id
+        self.step_name = step_name
+        message = f"Step log for {step_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class BranchLogNotFoundError(Exception): # pragma: no cover
@@ -55,11 +58,11 @@ class BranchLogNotFoundError(Exception): # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id, name):
-        super().__init__()
-        self.message = (
-            f"Branch log for {name} is not found in the datastore for Run id: {run_id}"
-        )
+    def __init__(self, run_id, branch_name):
+        self.run_id = run_id
+        self.branch_name = branch_name
+        message = f"Branch log for {branch_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class NodeNotFoundError(Exception): # pragma: no cover
@@ -70,8 +73,9 @@ class NodeNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, name):
-        super().__init__()
-        self.message = f"Node of name {name} is not found the graph"
+        self.name = name
+        message = f"Node of name {name} is not found the graph"
+        super().__init__(message)


 class BranchNotFoundError(Exception): # pragma: no cover
@@ -82,8 +86,9 @@ class BranchNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, name):
-        super().__init__()
-        self.message = f"Branch of name {name} is not found the graph"
+        self.name = name
+        message = f"Branch of name {name} is not found the graph"
+        super().__init__(message)


 class NodeMethodCallError(Exception):
@@ -92,32 +97,28 @@ class NodeMethodCallError(Exception):
     """

     def __init__(self, message):
-        super().__init__()
-        self.message = message
+        super().__init__(message)


 class TerminalNodeError(Exception): # pragma: no cover
     def __init__(self):
-        super().__init__()
-        self.message = "Terminal Nodes do not have next node"
+        message = "Terminal Nodes do not have next node"
+        super().__init__(message)


 class SecretNotFoundError(Exception): # pragma: no cover
-    """
-    Exception class
-    Args:
-        Exception ([type]): [description]
-    """
-
     def __init__(self, secret_name, secret_setting):
-        super().__init__()
-        self.message = f"No secret found by name:{secret_name} in {secret_setting}"
+        self.secret_name = secret_name
+        self.secret_setting = secret_setting
+        message = f"No secret found by name:{secret_name} in {secret_setting}"
+        super().__init__(message)


 class ExecutionFailedError(Exception): # pragma: no cover
     def __init__(self, run_id: str):
-        super().__init__()
-        self.message = f"Execution failed for run id: {run_id}"
+        self.run_id = run_id
+        message = f"Execution failed for run id: {run_id}"
+        super().__init__(message)


 class CommandCallError(Exception): # pragma: no cover
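
Every constructor in this module now stores its identifying values (`run_id`, `step_name`, `branch_name`, `secret_name`, ...) as attributes and passes the formatted message to `Exception.__init__` instead of parking it on `self.message`. A minimal sketch of what that means for calling code; the run id below is a made-up example:

```python
# Illustrative sketch only; RunLogExistsError comes from the diff above,
# the run id is invented for the example.
from runnable.exceptions import RunLogExistsError

try:
    raise RunLogExistsError("quiet-wozniak-0402")
except RunLogExistsError as exc:
    print(str(exc))    # Run id for quiet-wozniak-0402 is already found in the datastore
    print(exc.run_id)  # quiet-wozniak-0402 (attribute added in 0.36.0)
    # Code that previously read exc.message should switch to str(exc) or exc.args[0],
    # since 0.36.0 no longer sets a .message attribute.
```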
runnable/executor.py CHANGED
@@ -10,7 +10,7 @@ from pydantic import BaseModel, ConfigDict, PrivateAttr
 import runnable.context as context
 from runnable import defaults
 from runnable.datastore import DataCatalog, JobLog, StepLog
-from runnable.defaults import TypeMapVariable
+from runnable.defaults import MapVariableType

 if TYPE_CHECKING: # pragma: no cover
     from runnable.graph import Graph
@@ -34,7 +34,9 @@ class BaseExecutor(ABC, BaseModel):
     service_name: str = ""
     service_type: str = "executor"

-    _is_local: bool = PrivateAttr(default=False)
+    # Should have _should_setup_run_log_at_traversal, local, local_container, emulator is true
+    # False for everything else
+    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=True)

     model_config = ConfigDict(extra="forbid")

@@ -86,7 +88,7 @@ class BaseExecutor(ABC, BaseModel):

     @abstractmethod
     def add_task_log_to_catalog(
-        self, name: str, map_variable: Optional[TypeMapVariable] = None
+        self, name: str, map_variable: Optional[MapVariableType] = None
     ): ...


@@ -153,16 +155,7 @@ class BaseJobExecutor(BaseExecutor):
     """
     ...

-    # @abstractmethod
-    # def scale_up(self, job: BaseTaskType):
-    #     """
-    #     Scale up the job to run on max_nodes
-    #     This has to also call the entry point
-    #     """
-    #     ...

-
-# TODO: Consolidate execute_node, trigger_node_execution, _execute_node
 class BasePipelineExecutor(BaseExecutor):
     service_type: str = "pipeline_executor"
     overrides: dict[str, Any] = {}
@@ -214,7 +207,7 @@ class BasePipelineExecutor(BaseExecutor):
     def _execute_node(
         self,
         node: BaseNode,
-        map_variable: TypeMapVariable = None,
+        map_variable: MapVariableType = None,
         mock: bool = False,
     ):
         """
@@ -238,7 +231,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_node(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def execute_node(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         The entry point for all executors apart from local.
         We have already prepared for node execution.
@@ -253,7 +246,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_from_graph(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def execute_from_graph(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This is the entry point to from the graph execution.

@@ -282,7 +275,7 @@ class BasePipelineExecutor(BaseExecutor):

     @abstractmethod
     def _get_status_and_next_node_name(
-        self, current_node: BaseNode, dag: Graph, map_variable: TypeMapVariable = None
+        self, current_node: BaseNode, dag: Graph, map_variable: MapVariableType = None
     ) -> tuple[str, str]:
         """
         Given the current node and the graph, returns the name of the next node to execute.
@@ -301,7 +294,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def execute_graph(self, dag: Graph, map_variable: TypeMapVariable = None):
+    def execute_graph(self, dag: Graph, map_variable: MapVariableType = None):
         """
         The parallelization is controlled by the nodes and not by this function.

@@ -356,7 +349,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def fan_out(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def fan_out(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-out the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -379,7 +372,7 @@ class BasePipelineExecutor(BaseExecutor):
         ...

     @abstractmethod
-    def fan_in(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def fan_in(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-in after the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -402,7 +395,7 @@ class BasePipelineExecutor(BaseExecutor):

     @abstractmethod
     def trigger_node_execution(
-        self, node: BaseNode, map_variable: TypeMapVariable = None
+        self, node: BaseNode, map_variable: MapVariableType = None
     ):
         """
         Executor specific way of triggering jobs when runnable does both traversal and execution
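
Apart from replacing the `_is_local` flag with `_should_setup_run_log_at_traversal`, the change running through every signature here is the rename of the `runnable.defaults` alias from `TypeMapVariable` to `MapVariableType`. A hedged sketch of how downstream code that type-hints against the alias could absorb the rename while supporting both versions; the try/except shim is an assumption, not something shipped by the package:

```python
# Version-tolerant import of the renamed alias (illustrative only).
try:
    from runnable.defaults import MapVariableType  # runnable >= 0.36.0
except ImportError:  # pragma: no cover
    from runnable.defaults import TypeMapVariable as MapVariableType  # runnable <= 0.35.0


def describe_step(map_variable: MapVariableType = None) -> str:
    # Mirrors the `map_variable: MapVariableType = None` signatures in the diff above.
    return "branch of a map node" if map_variable else "plain step"
```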
runnable/names.py CHANGED
@@ -306,7 +306,7 @@ left = [
     "chalky",
     "moist",
     "formal",
-    "brute force",
+    "force",
     "ancient",
     "wan",
     "square",
runnable/nodes.py CHANGED
@@ -7,7 +7,7 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator
 import runnable.context as context
 from runnable import defaults, exceptions
 from runnable.datastore import StepLog
-from runnable.defaults import TypeMapVariable
+from runnable.defaults import MapVariableType
 from runnable.graph import Graph

 logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -42,6 +42,7 @@ class BaseNode(ABC, BaseModel):

     @property
     def _context(self):
+        assert isinstance(context.run_context, context.PipelineContext)
         return context.run_context

     model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=False)
@@ -81,7 +82,7 @@ class BaseNode(ABC, BaseModel):

     @classmethod
     def _resolve_map_placeholders(
-        cls, name: str, map_variable: TypeMapVariable = None
+        cls, name: str, map_variable: MapVariableType = None
     ) -> str:
         """
         If there is no map step used, then we just return the name as we find it.
@@ -132,7 +133,7 @@ class BaseNode(ABC, BaseModel):

         return name

-    def _get_step_log_name(self, map_variable: TypeMapVariable = None) -> str:
+    def _get_step_log_name(self, map_variable: MapVariableType = None) -> str:
         """
         For every step in the dag, there is a corresponding step log name.
         This method returns the step log name in dot path convention.
@@ -151,7 +152,7 @@ class BaseNode(ABC, BaseModel):
             self.internal_name, map_variable=map_variable
         )

-    def _get_branch_log_name(self, map_variable: TypeMapVariable = None) -> str:
+    def _get_branch_log_name(self, map_variable: MapVariableType = None) -> str:
         """
         For nodes that are internally branches, this method returns the branch log name.
         The branch log name is in dot path convention.
@@ -280,7 +281,7 @@ class BaseNode(ABC, BaseModel):
     def execute(
         self,
         mock=False,
-        map_variable: TypeMapVariable = None,
+        map_variable: MapVariableType = None,
         attempt_number: int = 1,
     ) -> StepLog:
         """
@@ -300,7 +301,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def execute_as_graph(self, map_variable: TypeMapVariable = None):
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         """
         This function would be called to set up the execution of the individual
         branches of a composite node.
@@ -315,7 +316,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def fan_out(self, map_variable: TypeMapVariable = None):
+    def fan_out(self, map_variable: MapVariableType = None):
         """
         This function would be called to set up the execution of the individual
         branches of a composite node.
@@ -331,7 +332,7 @@ class BaseNode(ABC, BaseModel):
         """

     @abstractmethod
-    def fan_in(self, map_variable: TypeMapVariable = None):
+    def fan_in(self, map_variable: MapVariableType = None):
         """
         This function would be called to tear down the execution of the individual
         branches of a composite node.
@@ -440,17 +441,17 @@ class ExecutableNode(TraversalNode):
             "This is an executable node and does not have branches"
         )

-    def execute_as_graph(self, map_variable: TypeMapVariable = None):
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a graph"
         )

-    def fan_in(self, map_variable: TypeMapVariable = None):
+    def fan_in(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a fan in"
         )

-    def fan_out(self, map_variable: TypeMapVariable = None):
+    def fan_out(self, map_variable: MapVariableType = None):
         raise exceptions.NodeMethodCallError(
             "This is an executable node and does not have a fan out"
         )
@@ -476,7 +477,7 @@ class CompositeNode(TraversalNode):
     def execute(
         self,
         mock=False,
-        map_variable: TypeMapVariable = None,
+        map_variable: MapVariableType = None,
         attempt_number: int = 1,
     ) -> StepLog:
         raise exceptions.NodeMethodCallError(
@@ -506,15 +507,15 @@ class TerminalNode(BaseNode):
     def _get_max_attempts(self) -> int:
         return 1

-    def execute_as_graph(self, map_variable: TypeMapVariable = None):
+    def execute_as_graph(self, map_variable: MapVariableType = None):
         raise exceptions.TerminalNodeError()

-    def fan_in(self, map_variable: TypeMapVariable = None):
+    def fan_in(self, map_variable: MapVariableType = None):
         raise exceptions.TerminalNodeError()

     def fan_out(
         self,
-        map_variable: TypeMapVariable = None,
+        map_variable: MapVariableType = None,
     ):
         raise exceptions.TerminalNodeError()

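Beyond the `MapVariableType` rename, the one functional addition here is the assert in `BaseNode._context`, which narrows `context.run_context` to the `PipelineContext` class referenced by the reworked `runnable/context.py`. A short sketch of the same narrowing pattern, written for illustration rather than taken from the package:

```python
import runnable.context as context


def pipeline_context() -> "context.PipelineContext":
    # Same idea as the added assert in BaseNode._context: fail loudly, and let
    # type checkers treat run_context as a PipelineContext from here on.
    assert isinstance(context.run_context, context.PipelineContext)
    return context.run_context
```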
runnable/parameters.py CHANGED
@@ -10,7 +10,7 @@ from typing_extensions import Callable

 from runnable import defaults
 from runnable.datastore import JsonParameter, ObjectParameter
-from runnable.defaults import TypeMapVariable
+from runnable.defaults import MapVariableType
 from runnable.utils import remove_prefix

 logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -51,7 +51,7 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
 def filter_arguments_for_func(
     func: Callable[..., Any],
     params: Dict[str, Any],
-    map_variable: TypeMapVariable = None,
+    map_variable: MapVariableType = None,
 ) -> Dict[str, Any]:
     """
     Inspects the function to be called as part of the pipeline to find the arguments of the function.
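
`filter_arguments_for_func` keeps its role of matching stored parameters (plus the active map variable) against the task function's signature; only the annotation alias changes. Below is a rough, simplified sketch of that idea, assuming the map variable is an optional mapping of iteration values; the real function also handles pydantic models and type coercion, which this omits:

```python
import inspect
from typing import Any, Callable, Dict, Optional


def pick_kwargs(
    func: Callable[..., Any],
    params: Dict[str, Any],
    map_variable: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    # Offer both pipeline parameters and map-iteration values, then keep only
    # the names the function actually declares.
    candidates = {**params, **(map_variable or {})}
    accepted = inspect.signature(func).parameters
    return {k: v for k, v in candidates.items() if k in accepted}


def train(epochs: int, chunk: int = 0) -> None: ...


print(pick_kwargs(train, {"epochs": 5, "lr": 0.01}, {"chunk": 2}))
# {'epochs': 5, 'chunk': 2} -- "lr" is dropped because train() does not accept it
```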