runnable 0.34.0a1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of runnable might be problematic; consult the package registry's advisory page for details.

Files changed (49)
  1. extensions/catalog/any_path.py +13 -2
  2. extensions/job_executor/__init__.py +7 -5
  3. extensions/job_executor/emulate.py +106 -0
  4. extensions/job_executor/k8s.py +8 -8
  5. extensions/job_executor/local_container.py +13 -14
  6. extensions/nodes/__init__.py +0 -0
  7. extensions/nodes/conditional.py +243 -0
  8. extensions/nodes/fail.py +72 -0
  9. extensions/nodes/map.py +350 -0
  10. extensions/nodes/parallel.py +159 -0
  11. extensions/nodes/stub.py +89 -0
  12. extensions/nodes/success.py +72 -0
  13. extensions/nodes/task.py +92 -0
  14. extensions/pipeline_executor/__init__.py +27 -27
  15. extensions/pipeline_executor/argo.py +52 -46
  16. extensions/pipeline_executor/emulate.py +112 -0
  17. extensions/pipeline_executor/local.py +4 -4
  18. extensions/pipeline_executor/local_container.py +19 -79
  19. extensions/pipeline_executor/mocked.py +5 -9
  20. extensions/pipeline_executor/retry.py +6 -10
  21. runnable/__init__.py +2 -11
  22. runnable/catalog.py +6 -23
  23. runnable/cli.py +145 -48
  24. runnable/context.py +520 -28
  25. runnable/datastore.py +51 -54
  26. runnable/defaults.py +12 -34
  27. runnable/entrypoints.py +82 -440
  28. runnable/exceptions.py +35 -34
  29. runnable/executor.py +13 -20
  30. runnable/gantt.py +1141 -0
  31. runnable/graph.py +1 -1
  32. runnable/names.py +1 -1
  33. runnable/nodes.py +20 -16
  34. runnable/parameters.py +108 -51
  35. runnable/sdk.py +125 -204
  36. runnable/tasks.py +62 -85
  37. runnable/utils.py +6 -268
  38. runnable-1.0.0.dist-info/METADATA +122 -0
  39. runnable-1.0.0.dist-info/RECORD +73 -0
  40. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/entry_points.txt +9 -8
  41. extensions/nodes/nodes.py +0 -778
  42. extensions/nodes/torch.py +0 -273
  43. extensions/nodes/torch_config.py +0 -76
  44. extensions/tasks/torch.py +0 -286
  45. extensions/tasks/torch_config.py +0 -76
  46. runnable-0.34.0a1.dist-info/METADATA +0 -267
  47. runnable-0.34.0a1.dist-info/RECORD +0 -67
  48. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/WHEEL +0 -0
  49. {runnable-0.34.0a1.dist-info → runnable-1.0.0.dist-info}/licenses/LICENSE +0 -0
runnable/graph.py CHANGED
@@ -329,7 +329,7 @@ def create_graph(dag_config: Dict[str, Any], internal_branch_name: str = "") ->
329
329
  Returns:
330
330
  Graph: The created graph object
331
331
  """
332
- description: str = dag_config.get("description", None)
332
+ description: str | None = dag_config.get("description", None)
333
333
  start_at: str = cast(
334
334
  str, dag_config.get("start_at")
335
335
  ) # Let the start_at be relative to the graph
runnable/names.py CHANGED
@@ -306,7 +306,7 @@ left = [
306
306
  "chalky",
307
307
  "moist",
308
308
  "formal",
309
- "brute force",
309
+ "force",
310
310
  "ancient",
311
311
  "wan",
312
312
  "square",
runnable/nodes.py CHANGED
@@ -7,7 +7,8 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator
7
7
  import runnable.context as context
8
8
  from runnable import defaults, exceptions
9
9
  from runnable.datastore import StepLog
10
- from runnable.defaults import TypeMapVariable
10
+ from runnable.defaults import MapVariableType
11
+ from runnable.graph import Graph
11
12
 
12
13
  logger = logging.getLogger(defaults.LOGGER_NAME)
13
14
 
@@ -41,6 +42,7 @@ class BaseNode(ABC, BaseModel):
41
42
 
42
43
  @property
43
44
  def _context(self):
45
+ assert isinstance(context.run_context, context.PipelineContext)
44
46
  return context.run_context
45
47
 
46
48
  model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=False)
@@ -80,7 +82,7 @@ class BaseNode(ABC, BaseModel):
80
82
 
81
83
  @classmethod
82
84
  def _resolve_map_placeholders(
83
- cls, name: str, map_variable: TypeMapVariable = None
85
+ cls, name: str, map_variable: MapVariableType = None
84
86
  ) -> str:
85
87
  """
86
88
  If there is no map step used, then we just return the name as we find it.
@@ -131,7 +133,7 @@ class BaseNode(ABC, BaseModel):
131
133
 
132
134
  return name
133
135
 
134
- def _get_step_log_name(self, map_variable: TypeMapVariable = None) -> str:
136
+ def _get_step_log_name(self, map_variable: MapVariableType = None) -> str:
135
137
  """
136
138
  For every step in the dag, there is a corresponding step log name.
137
139
  This method returns the step log name in dot path convention.
@@ -150,7 +152,7 @@ class BaseNode(ABC, BaseModel):
150
152
  self.internal_name, map_variable=map_variable
151
153
  )
152
154
 
153
- def _get_branch_log_name(self, map_variable: TypeMapVariable = None) -> str:
155
+ def _get_branch_log_name(self, map_variable: MapVariableType = None) -> str:
154
156
  """
155
157
  For nodes that are internally branches, this method returns the branch log name.
156
158
  The branch log name is in dot path convention.
@@ -218,7 +220,7 @@ class BaseNode(ABC, BaseModel):
218
220
  """
219
221
 
220
222
  @abstractmethod
221
- def _get_branch_by_name(self, branch_name: str):
223
+ def _get_branch_by_name(self, branch_name: str) -> Graph:
222
224
  """
223
225
  Retrieve a branch by name.
224
226
 
@@ -279,7 +281,7 @@ class BaseNode(ABC, BaseModel):
279
281
  def execute(
280
282
  self,
281
283
  mock=False,
282
- map_variable: TypeMapVariable = None,
284
+ map_variable: MapVariableType = None,
283
285
  attempt_number: int = 1,
284
286
  ) -> StepLog:
285
287
  """
@@ -299,7 +301,7 @@ class BaseNode(ABC, BaseModel):
299
301
  """
300
302
 
301
303
  @abstractmethod
302
- def execute_as_graph(self, map_variable: TypeMapVariable = None):
304
+ def execute_as_graph(self, map_variable: MapVariableType = None):
303
305
  """
304
306
  This function would be called to set up the execution of the individual
305
307
  branches of a composite node.
@@ -314,7 +316,7 @@ class BaseNode(ABC, BaseModel):
314
316
  """
315
317
 
316
318
  @abstractmethod
317
- def fan_out(self, map_variable: TypeMapVariable = None):
319
+ def fan_out(self, map_variable: MapVariableType = None):
318
320
  """
319
321
  This function would be called to set up the execution of the individual
320
322
  branches of a composite node.
@@ -330,7 +332,7 @@ class BaseNode(ABC, BaseModel):
330
332
  """
331
333
 
332
334
  @abstractmethod
333
- def fan_in(self, map_variable: TypeMapVariable = None):
335
+ def fan_in(self, map_variable: MapVariableType = None):
334
336
  """
335
337
  This function would be called to tear down the execution of the individual
336
338
  branches of a composite node.
@@ -409,11 +411,13 @@ class TraversalNode(BaseNode):
409
411
  return self.overrides.get(executor_type) or ""
410
412
 
411
413
 
414
+ # Unfortunately, this is defined in 2 places. Look in SDK
412
415
  class CatalogStructure(BaseModel):
413
416
  model_config = ConfigDict(extra="forbid") # Need to forbid
414
417
 
415
418
  get: List[str] = Field(default_factory=list)
416
419
  put: List[str] = Field(default_factory=list)
420
+ store_copy: bool = Field(default=True, alias="store_copy")
417
421
 
418
422
 
419
423
  class ExecutableNode(TraversalNode):
@@ -439,17 +443,17 @@ class ExecutableNode(TraversalNode):
439
443
  "This is an executable node and does not have branches"
440
444
  )
441
445
 
442
- def execute_as_graph(self, map_variable: TypeMapVariable = None):
446
+ def execute_as_graph(self, map_variable: MapVariableType = None):
443
447
  raise exceptions.NodeMethodCallError(
444
448
  "This is an executable node and does not have a graph"
445
449
  )
446
450
 
447
- def fan_in(self, map_variable: TypeMapVariable = None):
451
+ def fan_in(self, map_variable: MapVariableType = None):
448
452
  raise exceptions.NodeMethodCallError(
449
453
  "This is an executable node and does not have a fan in"
450
454
  )
451
455
 
452
- def fan_out(self, map_variable: TypeMapVariable = None):
456
+ def fan_out(self, map_variable: MapVariableType = None):
453
457
  raise exceptions.NodeMethodCallError(
454
458
  "This is an executable node and does not have a fan out"
455
459
  )
@@ -475,7 +479,7 @@ class CompositeNode(TraversalNode):
475
479
  def execute(
476
480
  self,
477
481
  mock=False,
478
- map_variable: TypeMapVariable = None,
482
+ map_variable: MapVariableType = None,
479
483
  attempt_number: int = 1,
480
484
  ) -> StepLog:
481
485
  raise exceptions.NodeMethodCallError(
@@ -505,15 +509,15 @@ class TerminalNode(BaseNode):
505
509
  def _get_max_attempts(self) -> int:
506
510
  return 1
507
511
 
508
- def execute_as_graph(self, map_variable: TypeMapVariable = None):
512
+ def execute_as_graph(self, map_variable: MapVariableType = None):
509
513
  raise exceptions.TerminalNodeError()
510
514
 
511
- def fan_in(self, map_variable: TypeMapVariable = None):
515
+ def fan_in(self, map_variable: MapVariableType = None):
512
516
  raise exceptions.TerminalNodeError()
513
517
 
514
518
  def fan_out(
515
519
  self,
516
- map_variable: TypeMapVariable = None,
520
+ map_variable: MapVariableType = None,
517
521
  ):
518
522
  raise exceptions.TerminalNodeError()
519
523
 
runnable/parameters.py CHANGED
@@ -1,16 +1,16 @@
1
+ import argparse
1
2
  import inspect
2
3
  import json
3
4
  import logging
4
5
  import os
5
- from typing import Any, Dict, Type
6
+ from typing import Any, Dict, Type, get_origin
6
7
 
7
- import pydantic
8
8
  from pydantic import BaseModel, ConfigDict
9
9
  from typing_extensions import Callable
10
10
 
11
11
  from runnable import defaults
12
12
  from runnable.datastore import JsonParameter, ObjectParameter
13
- from runnable.defaults import TypeMapVariable
13
+ from runnable.defaults import MapVariableType
14
14
  from runnable.utils import remove_prefix
15
15
 
16
16
  logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -48,15 +48,40 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
48
48
  return parameters
49
49
 
50
50
 
51
+ def return_json_parameters(params: Dict[str, Any]) -> Dict[str, Any]:
52
+ """
53
+ Returns the parameters as a JSON serializable dictionary.
54
+
55
+ Args:
56
+ params (dict): The parameters to serialize.
57
+
58
+ Returns:
59
+ dict: The JSON serializable dictionary.
60
+ """
61
+ return_params = {}
62
+ for key, value in params.items():
63
+ if isinstance(value, ObjectParameter):
64
+ continue
65
+
66
+ return_params[key] = value.get_value()
67
+ return return_params
68
+
69
+
51
70
  def filter_arguments_for_func(
52
71
  func: Callable[..., Any],
53
72
  params: Dict[str, Any],
54
- map_variable: TypeMapVariable = None,
73
+ map_variable: MapVariableType = None,
55
74
  ) -> Dict[str, Any]:
56
75
  """
57
76
  Inspects the function to be called as part of the pipeline to find the arguments of the function.
58
- Matches the function arguments to the parameters available either by command line or by up stream steps.
77
+ Matches the function arguments to the parameters available either by static parameters or by up stream steps.
59
78
 
79
+ The function "func" signature could be:
80
+ - def my_function(arg1: int, arg2: str, arg3: float):
81
+ - def my_function(arg1: int, arg2: str, arg3: float, **kwargs):
82
+ in this case, we would need to send in remaining keyword arguments as a dictionary.
83
+ - def my_function(arg1: int, arg2: str, arg3: float, args: argparse.Namespace):
84
+ In this case, we need to send the rest of the parameters as attributes of the args object.
60
85
 
61
86
  Args:
62
87
  func (Callable): The function to inspect
@@ -72,63 +97,95 @@ def filter_arguments_for_func(
72
97
  params[key] = JsonParameter(kind="json", value=v)
73
98
 
74
99
  bound_args = {}
75
- unassigned_params = set(params.keys())
76
- # Check if VAR_KEYWORD is used, it is we send back everything
77
- # If **kwargs is present in the function signature, we send back everything
78
- for name, value in function_args.items():
79
- if value.kind != inspect.Parameter.VAR_KEYWORD:
80
- continue
81
- # Found VAR_KEYWORD, we send back everything as found
82
- for key, value in params.items():
83
- bound_args[key] = params[key].get_value()
84
-
85
- return bound_args
100
+ var_keyword_param = None
101
+ namespace_param = None
86
102
 
87
- # Lets return what is asked for then!!
103
+ # First pass: Handle regular parameters and identify special parameters
88
104
  for name, value in function_args.items():
89
105
  # Ignore any *args
90
106
  if value.kind == inspect.Parameter.VAR_POSITIONAL:
91
107
  logger.warning(f"Ignoring parameter {name} as it is VAR_POSITIONAL")
92
108
  continue
93
109
 
94
- if name not in params:
95
- # No parameter of this name was provided
96
- if value.default == inspect.Parameter.empty:
97
- # No default value is given in the function signature. error as parameter is required.
98
- raise ValueError(
99
- f"Parameter {name} is required for {func.__name__} but not provided"
100
- )
101
- # default value is given in the function signature, nothing further to do.
110
+ # Check for **kwargs parameter, we need to send in all the unnamed values in this as a dict
111
+ if value.kind == inspect.Parameter.VAR_KEYWORD:
112
+ var_keyword_param = name
102
113
  continue
103
114
 
104
- param_value = params[name]
105
-
106
- if type(value.annotation) in [
107
- BaseModel,
108
- pydantic._internal._model_construction.ModelMetaclass,
109
- ] and not isinstance(param_value, ObjectParameter):
110
- # Even if the annotation is a pydantic model, it can be passed as an object parameter
111
- # We try to cast it as a pydantic model if asked
112
- named_param = params[name].get_value()
113
-
114
- if not isinstance(named_param, dict):
115
- # A case where the parameter is a one attribute model
116
- named_param = {name: named_param}
117
-
118
- bound_model = bind_args_for_pydantic_model(named_param, value.annotation)
119
- bound_args[name] = bound_model
115
+ # Check for argparse.Namespace parameter, we need to send in all the unnamed values in this as a namespace
116
+ if value.annotation == argparse.Namespace:
117
+ namespace_param = name
118
+ continue
120
119
 
121
- elif value.annotation in [str, int, float, bool]:
122
- # Cast it if its a primitive type. Ensure the type matches the annotation.
123
- bound_args[name] = value.annotation(params[name].get_value())
120
+ # Handle regular parameters
121
+ if name not in params:
122
+ if value.default != inspect.Parameter.empty:
123
+ # Default value is given in the function signature, we can use it
124
+ bound_args[name] = value.default
125
+ else:
126
+ # This is a required parameter that's missing - error immediately
127
+ raise ValueError(
128
+ f"Function {func.__name__} has required parameter '{name}' that is not present in the parameters"
129
+ )
124
130
  else:
125
- bound_args[name] = params[name].get_value()
126
-
127
- unassigned_params.remove(name)
128
-
129
- params = {
130
- key: params[key] for key in unassigned_params
131
- } # remove keys from params if they are assigned
131
+ # We have a parameter of this name, lets bind it
132
+ param_value = params[name]
133
+
134
+ if (issubclass(value.annotation, BaseModel)) and not isinstance(
135
+ param_value, ObjectParameter
136
+ ):
137
+ # Even if the annotation is a pydantic model, it can be passed as an object parameter
138
+ # We try to cast it as a pydantic model if asked
139
+ named_param = params[name].get_value()
140
+
141
+ if not isinstance(named_param, dict):
142
+ # A case where the parameter is a one attribute model
143
+ named_param = {name: named_param}
144
+
145
+ bound_model = bind_args_for_pydantic_model(
146
+ named_param, value.annotation
147
+ )
148
+ bound_args[name] = bound_model
149
+
150
+ elif value.annotation is not inspect.Parameter.empty and callable(
151
+ value.annotation
152
+ ):
153
+ # Cast it if its a primitive type. Ensure the type matches the annotation.
154
+ try:
155
+ # Handle typing generics like Dict[str, int], List[str] by using their origin
156
+ origin = get_origin(value.annotation)
157
+ if origin is not None:
158
+ # For generics like Dict[str, int], use dict() instead of Dict[str, int]()
159
+ bound_args[name] = origin(params[name].get_value())
160
+ else:
161
+ # Regular callable types like int, str, float, etc.
162
+ bound_args[name] = value.annotation(params[name].get_value())
163
+ except (ValueError, TypeError) as e:
164
+ annotation_name = getattr(
165
+ value.annotation, "__name__", str(value.annotation)
166
+ )
167
+ raise ValueError(
168
+ f"Cannot cast parameter '{name}' to {annotation_name}: {e}"
169
+ )
170
+ else:
171
+ # We do not know type of parameter, we send the value as found
172
+ bound_args[name] = params[name].get_value()
173
+
174
+ # Find extra parameters (parameters in params but not consumed by regular function parameters)
175
+ consumed_param_names = set(bound_args.keys())
176
+ extra_params = {k: v for k, v in params.items() if k not in consumed_param_names}
177
+
178
+ # Second pass: Handle **kwargs and argparse.Namespace parameters
179
+ if var_keyword_param is not None:
180
+ # Function accepts **kwargs - add all extra parameters directly to bound_args
181
+ for param_name, param_value in extra_params.items():
182
+ bound_args[param_name] = param_value.get_value()
183
+ elif namespace_param is not None:
184
+ # Function accepts argparse.Namespace - create namespace with extra parameters
185
+ args_namespace = argparse.Namespace()
186
+ for param_name, param_value in extra_params.items():
187
+ setattr(args_namespace, param_name, param_value.get_value())
188
+ bound_args[namespace_param] = args_namespace
132
189
 
133
190
  return bound_args
134
191