runnable 0.34.0a1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (49)
  1. extensions/catalog/any_path.py +13 -2
  2. extensions/job_executor/__init__.py +7 -5
  3. extensions/job_executor/emulate.py +106 -0
  4. extensions/job_executor/k8s.py +8 -8
  5. extensions/job_executor/local_container.py +13 -14
  6. extensions/nodes/__init__.py +0 -0
  7. extensions/nodes/conditional.py +243 -0
  8. extensions/nodes/fail.py +72 -0
  9. extensions/nodes/map.py +350 -0
  10. extensions/nodes/parallel.py +159 -0
  11. extensions/nodes/stub.py +89 -0
  12. extensions/nodes/success.py +72 -0
  13. extensions/nodes/task.py +92 -0
  14. extensions/pipeline_executor/__init__.py +27 -27
  15. extensions/pipeline_executor/argo.py +52 -46
  16. extensions/pipeline_executor/emulate.py +112 -0
  17. extensions/pipeline_executor/local.py +4 -4
  18. extensions/pipeline_executor/local_container.py +19 -79
  19. extensions/pipeline_executor/mocked.py +5 -9
  20. extensions/pipeline_executor/retry.py +6 -10
  21. runnable/__init__.py +2 -11
  22. runnable/catalog.py +6 -23
  23. runnable/cli.py +145 -48
  24. runnable/context.py +520 -28
  25. runnable/datastore.py +51 -54
  26. runnable/defaults.py +12 -34
  27. runnable/entrypoints.py +82 -440
  28. runnable/exceptions.py +35 -34
  29. runnable/executor.py +13 -20
  30. runnable/gantt.py +1141 -0
  31. runnable/graph.py +1 -1
  32. runnable/names.py +1 -1
  33. runnable/nodes.py +20 -16
  34. runnable/parameters.py +108 -51
  35. runnable/sdk.py +125 -204
  36. runnable/tasks.py +62 -85
  37. runnable/utils.py +6 -268
  38. runnable-1.0.0.dist-info/METADATA +122 -0
  39. runnable-1.0.0.dist-info/RECORD +73 -0
  40. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/entry_points.txt +9 -8
  41. extensions/nodes/nodes.py +0 -778
  42. extensions/nodes/torch.py +0 -273
  43. extensions/nodes/torch_config.py +0 -76
  44. extensions/tasks/torch.py +0 -286
  45. extensions/tasks/torch_config.py +0 -76
  46. runnable-0.34.0a1.dist-info/METADATA +0 -267
  47. runnable-0.34.0a1.dist-info/RECORD +0 -67
  48. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/WHEEL +0 -0
  49. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/licenses/LICENSE +0 -0
runnable/exceptions.py CHANGED
@@ -6,8 +6,9 @@ class RunLogExistsError(Exception): # pragma: no cover
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Run id for {run_id} is already found in the datastore"
+        self.run_id = run_id
+        message = f"Run id for {run_id} is already found in the datastore"
+        super().__init__(message)


 class JobLogNotFoundError(Exception):
@@ -18,8 +19,9 @@ class JobLogNotFoundError(Exception):
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Job for {run_id} is not found in the datastore"
+        self.run_id = run_id
+        message = f"Job for {run_id} is not found in the datastore"
+        super().__init__(message)


 class RunLogNotFoundError(Exception): # pragma: no cover
@@ -30,8 +32,9 @@ class RunLogNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, run_id):
-        super().__init__()
-        self.message = f"Run id for {run_id} is not found in the datastore"
+        self.run_id = run_id
+        message = f"Run id for {run_id} is not found in the datastore"
+        super().__init__(message)


 class StepLogNotFoundError(Exception): # pragma: no cover
@@ -41,11 +44,11 @@ class StepLogNotFoundError(Exception): # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id, name):
-        super().__init__()
-        self.message = (
-            f"Step log for {name} is not found in the datastore for Run id: {run_id}"
-        )
+    def __init__(self, run_id, step_name):
+        self.run_id = run_id
+        self.step_name = step_name
+        message = f"Step log for {step_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class BranchLogNotFoundError(Exception): # pragma: no cover
@@ -55,11 +58,11 @@ class BranchLogNotFoundError(Exception): # pragma: no cover
         Exception ([type]): [description]
     """

-    def __init__(self, run_id, name):
-        super().__init__()
-        self.message = (
-            f"Branch log for {name} is not found in the datastore for Run id: {run_id}"
-        )
+    def __init__(self, run_id, branch_name):
+        self.run_id = run_id
+        self.branch_name = branch_name
+        message = f"Branch log for {branch_name} is not found in the datastore for Run id: {run_id}"
+        super().__init__(message)


 class NodeNotFoundError(Exception): # pragma: no cover
@@ -70,8 +73,9 @@ class NodeNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, name):
-        super().__init__()
-        self.message = f"Node of name {name} is not found the graph"
+        self.name = name
+        message = f"Node of name {name} is not found the graph"
+        super().__init__(message)


 class BranchNotFoundError(Exception): # pragma: no cover
@@ -82,8 +86,9 @@ class BranchNotFoundError(Exception): # pragma: no cover
     """

     def __init__(self, name):
-        super().__init__()
-        self.message = f"Branch of name {name} is not found the graph"
+        self.name = name
+        message = f"Branch of name {name} is not found the graph"
+        super().__init__(message)


 class NodeMethodCallError(Exception):
@@ -92,32 +97,28 @@ class NodeMethodCallError(Exception):
     """

     def __init__(self, message):
-        super().__init__()
-        self.message = message
+        super().__init__(message)


 class TerminalNodeError(Exception): # pragma: no cover
     def __init__(self):
-        super().__init__()
-        self.message = "Terminal Nodes do not have next node"
+        message = "Terminal Nodes do not have next node"
+        super().__init__(message)


 class SecretNotFoundError(Exception): # pragma: no cover
-    """
-    Exception class
-    Args:
-        Exception ([type]): [description]
-    """
-
     def __init__(self, secret_name, secret_setting):
-        super().__init__()
-        self.message = f"No secret found by name:{secret_name} in {secret_setting}"
+        self.secret_name = secret_name
+        self.secret_setting = secret_setting
+        message = f"No secret found by name:{secret_name} in {secret_setting}"
+        super().__init__(message)


 class ExecutionFailedError(Exception): # pragma: no cover
     def __init__(self, run_id: str):
-        super().__init__()
-        self.message = f"Execution failed for run id: {run_id}"
+        self.run_id = run_id
+        message = f"Execution failed for run id: {run_id}"
+        super().__init__(message)


 class CommandCallError(Exception): # pragma: no cover
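
The pattern across exceptions.py is consistent: every exception now stores its identifying fields (run_id, step_name, branch_name, and so on) as attributes and passes the formatted message to super().__init__(message), instead of calling a bare super().__init__() and stashing the text on self.message. Below is a minimal sketch of the practical difference; OldStyleError and NewStyleError are illustrative names, not classes from the package.

class OldStyleError(Exception):
    def __init__(self, run_id):
        super().__init__()  # message never reaches the Exception machinery
        self.message = f"Run id for {run_id} is already found in the datastore"


class NewStyleError(Exception):
    def __init__(self, run_id):
        self.run_id = run_id  # structured field callers can inspect
        message = f"Run id for {run_id} is already found in the datastore"
        super().__init__(message)  # str(exc) and tracebacks now carry the text


old, new = OldStyleError("demo-run"), NewStyleError("demo-run")
print(str(old))    # "" -- the text is only reachable via old.message
print(str(new))    # "Run id for demo-run is already found in the datastore"
print(new.run_id)  # "demo-run"

Note that the self.message attribute is dropped in 1.0.0, so callers that read it directly will need to switch to str(exc) or exc.args[0].
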
runnable/executor.py CHANGED
@@ -10,7 +10,7 @@ from pydantic import BaseModel, ConfigDict, PrivateAttr
 import runnable.context as context
 from runnable import defaults
 from runnable.datastore import DataCatalog, JobLog, StepLog
-from runnable.defaults import TypeMapVariable
+from runnable.defaults import MapVariableType

 if TYPE_CHECKING: # pragma: no cover
     from runnable.graph import Graph
@@ -34,7 +34,9 @@ class BaseExecutor(ABC, BaseModel):
     service_name: str = ""
     service_type: str = "executor"

-    _is_local: bool = PrivateAttr(default=False)
+    # Should have _should_setup_run_log_at_traversal, local, local_container, emulator is true
+    # False for everything else
+    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=True)

     model_config = ConfigDict(extra="forbid")

@@ -86,7 +88,7 @@

     @abstractmethod
     def add_task_log_to_catalog(
-        self, name: str, map_variable: Optional[TypeMapVariable] = None
+        self, name: str, map_variable: Optional[MapVariableType] = None
     ): ...


@@ -153,16 +155,7 @@ class BaseJobExecutor(BaseExecutor):
         """
         ...

-    # @abstractmethod
-    # def scale_up(self, job: BaseTaskType):
-    #     """
-    #     Scale up the job to run on max_nodes
-    #     This has to also call the entry point
-    #     """
-    #     ...

-
-# TODO: Consolidate execute_node, trigger_node_execution, _execute_node
 class BasePipelineExecutor(BaseExecutor):
     service_type: str = "pipeline_executor"
     overrides: dict[str, Any] = {}
@@ -214,7 +207,7 @@
     def _execute_node(
         self,
         node: BaseNode,
-        map_variable: TypeMapVariable = None,
+        map_variable: MapVariableType = None,
         mock: bool = False,
     ):
         """
@@ -238,7 +231,7 @@
         ...

     @abstractmethod
-    def execute_node(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def execute_node(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         The entry point for all executors apart from local.
         We have already prepared for node execution.
@@ -253,7 +246,7 @@
         ...

     @abstractmethod
-    def execute_from_graph(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def execute_from_graph(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This is the entry point to from the graph execution.

@@ -282,7 +275,7 @@

     @abstractmethod
     def _get_status_and_next_node_name(
-        self, current_node: BaseNode, dag: Graph, map_variable: TypeMapVariable = None
+        self, current_node: BaseNode, dag: Graph, map_variable: MapVariableType = None
     ) -> tuple[str, str]:
         """
         Given the current node and the graph, returns the name of the next node to execute.
@@ -301,7 +294,7 @@
         ...

     @abstractmethod
-    def execute_graph(self, dag: Graph, map_variable: TypeMapVariable = None):
+    def execute_graph(self, dag: Graph, map_variable: MapVariableType = None):
         """
         The parallelization is controlled by the nodes and not by this function.

@@ -356,7 +349,7 @@
         ...

     @abstractmethod
-    def fan_out(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def fan_out(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-out the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -379,7 +372,7 @@
         ...

     @abstractmethod
-    def fan_in(self, node: BaseNode, map_variable: TypeMapVariable = None):
+    def fan_in(self, node: BaseNode, map_variable: MapVariableType = None):
         """
         This method is used to appropriately fan-in after the execution of a composite node.
         This is only useful when we want to execute a composite node during 3rd party orchestrators.
@@ -402,7 +395,7 @@

     @abstractmethod
     def trigger_node_execution(
-        self, node: BaseNode, map_variable: TypeMapVariable = None
+        self, node: BaseNode, map_variable: MapVariableType = None
     ):
         """
         Executor specific way of triggering jobs when runnable does both traversal and execution
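
Two renames run through executor.py: the type alias TypeMapVariable becomes MapVariableType in every signature, and the private flag _is_local becomes _should_setup_run_log_at_traversal, which now defaults to True and, per the new comment, is expected to be flipped to False by non-local executors. The sketch below uses plain pydantic v2 (the same import line shown above) to illustrate how such a PrivateAttr default and subclass override behave; SketchBaseExecutor and SketchArgoExecutor are illustrative stand-ins, not runnable's actual classes.

from pydantic import BaseModel, ConfigDict, PrivateAttr


class SketchBaseExecutor(BaseModel):
    service_name: str = ""
    service_type: str = "executor"

    # True for local-style executors that set up the run log while traversing;
    # remote/orchestrated executors override this to False.
    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=True)

    model_config = ConfigDict(extra="forbid")


class SketchArgoExecutor(SketchBaseExecutor):
    service_name: str = "argo"
    _should_setup_run_log_at_traversal: bool = PrivateAttr(default=False)


print(SketchBaseExecutor()._should_setup_run_log_at_traversal)  # True
print(SketchArgoExecutor()._should_setup_run_log_at_traversal)  # False

Code that imports the old alias directly (from runnable.defaults import TypeMapVariable) will likely need to switch to MapVariableType when moving to 1.0.0, unless the old name is kept as an alias, which this diff does not show.
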