runnable-0.19.1-py3-none-any.whl → runnable-0.20.0-py3-none-any.whl

@@ -155,39 +155,6 @@ class LocalContainerExecutor(GenericPipelineExecutor):
         )
         self.trigger_node_execution(node=node, map_variable=map_variable, **kwargs)
 
-    # def execute_job(self, node: TaskNode):
-    #     """
-    #     Set up the step log and call the execute node
-
-    #     Args:
-    #         node (BaseNode): _description_
-    #     """
-
-    #     step_log = self._context.run_log_store.create_step_log(
-    #         node.name, node._get_step_log_name(map_variable=None)
-    #     )
-
-    #     self.add_code_identities(node=node, step_log=step_log)
-
-    #     step_log.step_type = node.node_type
-    #     step_log.status = defaults.PROCESSING
-    #     self._context.run_log_store.add_step_log(step_log, self._context.run_id)
-
-    #     command = utils.get_job_execution_command(node)
-    #     self._spin_container(node=node, command=command)
-
-    #     # Check the step log status and warn if necessary. Docker errors are generally suppressed.
-    #     step_log = self._context.run_log_store.get_step_log(
-    #         node._get_step_log_name(map_variable=None), self._context.run_id
-    #     )
-    #     if step_log.status != defaults.SUCCESS:
-    #         msg = (
-    #             "Node execution inside the container failed. Please check the logs.\n"
-    #             "Note: If you do not see any docker issue from your side and the code works properly on local execution"
-    #             "please raise a bug report."
-    #         )
-    #         logger.warning(msg)
-
     def trigger_node_execution(
         self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
     ):
runnable/cli.py CHANGED
@@ -164,7 +164,7 @@ def fan(
     python_or_yaml_file: Annotated[
         str, typer.Argument(help="The pipeline definition file")
     ],
-    mode: Annotated[FanMode, typer.Option(help="fan in or fan out")],
+    in_or_out: Annotated[str, typer.Argument(help="The fan mode")],
     map_variable: Annotated[
         str,
         typer.Option(
@@ -172,7 +172,7 @@ def fan(
             help="The map variable dictionary in str",
             show_default=True,
         ),
-    ],
+    ] = "",
     config_file: Annotated[
         str,
         typer.Option(
@@ -197,6 +197,14 @@ def fan(
         ),
     ] = LogLevel.INFO,
     tag: Annotated[str, typer.Option(help="A tag attached to the run")] = "",
+    mode: Annotated[
+        ExecutionMode,
+        typer.Option(
+            "--mode",
+            "-m",
+            help="spec in yaml or python sdk",
+        ),
+    ] = ExecutionMode.YAML,
 ):
     logger.setLevel(log_level.value)
 
@@ -206,6 +214,7 @@ def fan(
         pipeline_file=python_or_yaml_file,
         step_name=step_name,
         mode=mode,
+        in_or_out=in_or_out,
         map_variable=map_variable,
         run_id=run_id,
         tag=tag,
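
A minimal sketch of the resulting CLI shape, assuming the positional arguments ahead of the pipeline file remain the run id and step name (as in get_fan_command further below); the values here are placeholders. Fan in/out is now positional, --map-variable is optional, and --mode/-m selects YAML or the Python SDK:

    runnable fan my-run-id my_step pipeline.yaml in --log-level INFO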
runnable/entrypoints.py CHANGED
@@ -332,6 +332,7 @@ def execute_single_node(
         run_id=run_id, configuration_file=configuration_file, tag=tag
     )
 
+    # TODO: Is it useful to make it get from an environment variable
     map_variable_dict = utils.json_to_ordered_dict(map_variable)
 
     step_internal_name = nodes.BaseNode._get_internal_name_from_command_name(step_name)
@@ -354,6 +355,8 @@ def execute_single_node(
         run_context.catalog_handler.put(name=log_file_name, run_id=run_context.run_id)
         os.remove(log_file_name)
 
+    executor.send_return_code()
+
 
 def execute_job_yaml_spec(
     job_definition_file: str,
@@ -492,6 +495,7 @@ def fan(
     pipeline_file: str,
     step_name: str,
     mode: str,
+    in_or_out: str,
     map_variable: str,
     run_id: str,
     tag: str = "",
@@ -551,10 +555,10 @@ def fan(
 
     map_variable_dict = utils.json_to_ordered_dict(map_variable)
 
-    if mode == "in":
+    if in_or_out == "in":
         logger.info("Fanning in for : %s", node_to_execute)
         executor.fan_in(node=node_to_execute, map_variable=map_variable_dict)
-    elif mode == "out":
+    elif in_or_out == "out":
         logger.info("Fanning out for : %s", node_to_execute)
         executor.fan_out(node=node_to_execute, map_variable=map_variable_dict)
     else:
runnable/sdk.py CHANGED
@@ -419,6 +419,11 @@ class ShellTask(BaseTask):
     def command_type(self) -> str:
         return "shell"
 
+    def create_job(self) -> RunnableTask:
+        self.terminate_with_success = True
+        node = self.create_node()
+        return node.executable
+
 
 class Stub(BaseTraversal):
     """
runnable/tasks.py CHANGED
@@ -11,7 +11,7 @@ from datetime import datetime
 from pathlib import Path
 from pickle import PicklingError
 from string import Template
-from typing import Any, Dict, List, Literal, Tuple
+from typing import Any, Dict, List, Literal
 
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 from stevedore import driver
@@ -59,20 +59,6 @@ class BaseTaskType(BaseModel):
     def _context(self):
         return context.run_context
 
-    def get_cli_options(self) -> Tuple[str, dict]:
-        """
-        Key is the name of the cli option and value is the value of the cli option.
-        This should always be in sync with the cli options defined in execute_*.
-
-        Returns:
-            str: The name of the cli option.
-            dict: The dict of cli options for the task.
-
-        Raises:
-            NotImplementedError: Base class, not implemented
-        """
-        raise NotImplementedError()
-
     def set_secrets_as_env_variables(self):
         # Preparing the environment for the task execution
         for key in self.secrets:
@@ -262,14 +248,6 @@ class PythonTaskType(BaseTaskType):  # pylint: disable=too-few-public-methods
     task_type: str = Field(default="python", serialization_alias="command_type")
     command: str
 
-    def get_cli_options(self) -> Tuple[str, dict]:
-        """Return the cli options for the task.
-
-        Returns:
-            dict: The cli options for the task
-        """
-        return "function", {"command": self.command}
-
     def execute_command(
         self,
         map_variable: TypeMapVariable = None,
@@ -425,26 +403,22 @@ class NotebookTaskType(BaseTaskType):
 
         return command
 
-    @property
-    def notebook_output_path(self) -> str:
-        # This is to accommodate jobs which does not have a context_node
+    def get_notebook_output_path(self, map_variable: TypeMapVariable = None) -> str:
+        tag = ""
+        map_variable = map_variable or {}
+        for key, value in map_variable.items():
+            tag += f"{key}_{value}_"
+
         if self._context.executor._context_node:
-            node_name = self._context.executor._context_node.internal_name
-            sane_name = "".join(x for x in node_name if x.isalnum())
-        else:
-            sane_name = ""
+            tag += self._context.executor._context_node.name
+
+        tag = "".join(x for x in tag if x.isalnum())
 
         output_path = Path(".", self.command)
-        file_name = output_path.parent / (output_path.stem + f"{sane_name}_out.ipynb")
+        file_name = output_path.parent / (output_path.stem + f"-{tag}_out.ipynb")
 
         return str(file_name)
 
-    def get_cli_options(self) -> Tuple[str, dict]:
-        return "notebook", {
-            "command": self.command,
-            "notebook-output-path": self.notebook_output_path,
-        }
-
     def execute_command(
         self,
         map_variable: TypeMapVariable = None,
@@ -464,7 +438,9 @@ class NotebookTaskType(BaseTaskType):
         import ploomber_engine as pm
         from ploomber_engine.ipython import PloomberClient
 
-        notebook_output_path = self.notebook_output_path
+        notebook_output_path = self.get_notebook_output_path(
+            map_variable=map_variable
+        )
 
         with (
             self.execution_context(
@@ -476,7 +452,6 @@ class NotebookTaskType(BaseTaskType):
 
         if map_variable:
             for key, value in map_variable.items():
-                notebook_output_path += "_" + str(value)
                 copy_params[key] = JsonParameter(kind="json", value=value)
 
         # Remove any {v}_unreduced parameters from the parameters
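
For intuition about the renamed helper, here is a standalone sketch of the same naming logic; the notebook path, map variable, and node name below are hypothetical, and the real method reads the node from the executor context:

    from pathlib import Path

    command = "notebooks/train.ipynb"   # assumed task command
    map_variable = {"chunk": 1}         # assumed map variable
    node_name = "train model"           # assumed context node name

    # Build the tag from the map variable values plus the node name, keep alphanumerics only
    tag = "".join(f"{k}_{v}_" for k, v in map_variable.items()) + node_name
    tag = "".join(x for x in tag if x.isalnum())

    output_path = Path(".", command)
    file_name = output_path.parent / (output_path.stem + f"-{tag}_out.ipynb")
    print(file_name)  # notebooks/train-chunk1trainmodel_out.ipynb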
runnable/utils.py CHANGED
@@ -455,9 +455,9 @@ def get_fan_command(
     log_level = log_level or logging.getLevelName(logger.getEffectiveLevel())
     action = (
         f"runnable fan {run_id} "
-        f"{node._command_friendly_name()} "
-        f"{context.run_context.pipeline_file} "
-        f"--mode {mode} "
+        f"{node._command_friendly_name()} "  # step name
+        f"{context.run_context.pipeline_file} "  # yaml or python
+        f"{mode} "  # in or out
         f"--log-level {log_level} "
     )
     if context.run_context.configuration_file:
@@ -469,8 +469,8 @@ def get_fan_command(
     if map_variable:
         action = action + f" --map-variable '{json.dumps(map_variable)}'"
 
-    if context.run_context.tag:
-        action = action + f" --tag {context.run_context.tag}"
+    if context.run_context.from_sdk:  # execution mode
+        action = action + " --mode python "
 
     return action
 
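Put together, a fan-out command generated for an SDK-defined pipeline would look roughly like the line below; the run id, step name, file name, and map variable are placeholders, and the flags follow the assembly shown above:

    runnable fan funny-turing-0123 chunk_files pipeline.py out --log-level WARNING --map-variable '{"chunk": 1}' --mode python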
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.19.1
+Version: 0.20.0
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE
@@ -15,10 +15,9 @@ extensions/nodes/nodes.py,sha256=ib68QE737ihGLIVp3V2wea13u7lmMZdRvK80bgUkRtA,346
 extensions/nodes/pyproject.toml,sha256=YTu-ETN3JNFSkMzzWeOwn4m-O2nbRH-PmiPBALDCUw4,278
 extensions/pipeline_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/pipeline_executor/__init__.py,sha256=YnKILiy-SxfnG3rYUoinjh1lfkuAF5QXpPePtn6VxBY,25174
-extensions/pipeline_executor/argo.py,sha256=ClfuU_Of_2f5mvqVgY1QQwwJwXHB0LbzwNArG1x2Axc,44666
-extensions/pipeline_executor/argo_specification.yaml,sha256=wXQcm2gOQYqy-IOQIhucohS32ZrHKCfGA5zZ0RraPYc,1276
+extensions/pipeline_executor/argo.py,sha256=zA-9zeo93Rvyn-7--bLNwRybREUrtmFRC0QL8oXwNio,32154
 extensions/pipeline_executor/local.py,sha256=H8s6AdML_9_f-vdGG_6k0y9FbLqAqvA1S_7xMNyARzY,1946
-extensions/pipeline_executor/local_container.py,sha256=UCap8wCbHrtTN5acECBBkvcXkA3SXtrAOGW88JT7ofw,13853
+extensions/pipeline_executor/local_container.py,sha256=HOT9I-cPDCvgy6_bzNEtl4jPhTyeYSn1GK7lplH3vDA,12515
 extensions/pipeline_executor/mocked.py,sha256=SuObJ6Myt7p8duW8sylIp1cYIAnFutsJW1avWaOUY3c,5798
 extensions/pipeline_executor/pyproject.toml,sha256=ykTX7srR10PBYb8LsIwEj8vIPPIEZQ5V_R7VYbZ-ido,291
 extensions/pipeline_executor/retry.py,sha256=KGenhWrLLmOQgzMvqloXHDRJyoNs91t05rRW8aLW6FA,6969
@@ -35,11 +34,11 @@ extensions/secrets/dotenv.py,sha256=FbYYd_pVuJuVuIDIvXbzKuSSQ9GPq7xJXTDbJMTQbhM,
 extensions/secrets/pyproject.toml,sha256=mLJNImNcBlbLKHh-0ugVWT9V83R4RibyyYDtBCSqVF4,282
 runnable/__init__.py,sha256=fYkOrbsb-E1rGkrof7kOJ3KboTFH-HriGa-8npn4-50,625
 runnable/catalog.py,sha256=b9N40kTv1IBidzlWjkHcBGyYhq6qIDHZfBuFenzjsMI,4924
-runnable/cli.py,sha256=01zmzOdynEmLI4vWDtSHQ6y1od_Jlc8G1RF69fi2L8g,8446
+runnable/cli.py,sha256=xHb2VSC16z9IT45SnhUYHuXUrzPqF_pK-jyWiTnz5sM,8670
 runnable/context.py,sha256=by5uepmuCP0dmM9BmsliXihSes5QEFejwAsmekcqylE,1388
 runnable/datastore.py,sha256=9y5enzn6AXLHLdwvgkdjGPrBkVlrcjfbaAHsst-lJzg,32466
 runnable/defaults.py,sha256=3o9IVGryyCE6PoQTOoaIaHHTbJGEzmdXMcwzOhwAYoI,3518
-runnable/entrypoints.py,sha256=P958nFz5WAsgTwd9sW04Q30vtjweYpr3rPsHVY4gh2U,18876
+runnable/entrypoints.py,sha256=DxboaCJ4ADZjIHl_oSPsC74ChUCKYic3i8dQqd6U43w,19009
 runnable/exceptions.py,sha256=LFbp0-Qxg2PAMLEVt7w2whhBxSG-5pzUEv5qN-Rc4_c,3003
 runnable/executor.py,sha256=ZPpfKwjDJnta03M2cWIINXcwke2ZDVc_QrIw7kwpHDQ,15547
 runnable/graph.py,sha256=jVjikRLR-so3b2ufmNKpEQ_Ny68qN4bcGDAdXBRKiCY,16574
@@ -47,12 +46,12 @@ runnable/names.py,sha256=vn92Kv9ANROYSZX6Z4z1v_WA3WiEdIYmG6KEStBFZug,8134
 runnable/nodes.py,sha256=YU9u7r1ESzui1uVtJ1dgwdv1ozyJnF2k-MCFieT8CLI,17519
 runnable/parameters.py,sha256=LyQb1d0SaFeI4PJ_yDYt9wArm9ThSPASWb36TwIdDUs,5213
 runnable/pickler.py,sha256=ydJ_eti_U1F4l-YacFp7BWm6g5vTn04UXye25S1HVok,2684
-runnable/sdk.py,sha256=xN5F4XX8r5wCN131kgN2xG7MkNm0bSGJ3Ukw8prHYJ8,31444
+runnable/sdk.py,sha256=thffRpZgkz1XW4DUY6EIJPyF8NLaTE1puh43dZKW9MU,31595
 runnable/secrets.py,sha256=PXcEJw-4WPzeWRLfsatcPPyr1zkqgHzdRWRcS9vvpvM,2354
-runnable/tasks.py,sha256=JnIIYQf3YUidHXIN6hiUIfDnegc7_rJMNXuHW4WS9ig,29378
-runnable/utils.py,sha256=Kwf54tHMVXYK7MCmvAi_FG08U_bHDKIQO-HDpM9X0QI,19500
-runnable-0.19.1.dist-info/METADATA,sha256=INBkKmT9vbToqxJs3UCH4G2Db1G8Gk7mR-Jsk-r99EE,9945
-runnable-0.19.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-runnable-0.19.1.dist-info/entry_points.txt,sha256=seek5WVGvwYALm8lZ0TfPXwG5NaCeUKjU8urF8k3gvY,1621
-runnable-0.19.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-runnable-0.19.1.dist-info/RECORD,,
+runnable/tasks.py,sha256=WdXFUL9mT3zmUc5YoOrTmwfjss02MwNKanf11pn8py4,28497
+runnable/utils.py,sha256=h-E6ZmUDcKwrJLjKlu0XXAK4uzfnD0K0EEPpZmeSIfM,19542
+runnable-0.20.0.dist-info/METADATA,sha256=njNTjovB_HmjC52tNvGJkj5Ae0aZV4nLeNdjwynXnKE,9945
+runnable-0.20.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+runnable-0.20.0.dist-info/entry_points.txt,sha256=seek5WVGvwYALm8lZ0TfPXwG5NaCeUKjU8urF8k3gvY,1621
+runnable-0.20.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+runnable-0.20.0.dist-info/RECORD,,
@@ -1,51 +0,0 @@
-apiVersion: argoproj.io/v1alpha1
-kind: Workflow
-metadata:
-  generateName: runnable-dag
-spec:
-  activeDeadlineSeconds: int # max run time of the workflow
-  entrypoint: str
-  nodeSelector: Dict[str, str] # global node selector
-  parallelism: # global level
-  podGC: OnPodCompletion
-  resources: # Should be converted to podSpecPath
-    limits:
-    requests:
-  podSpecPatch: json str representation of resources for defaults
-  retryStrategy: # global level for all templates
-    limit: int
-    retryPolicy: # global level for all templates
-    backoff:
-      duration: str
-      factor: int
-      maxDuration: str
-  serviceAccountName: str # Optionally required
-  templateDefaults:
-    activeDeadlineSeconds: int, for a template
-    timeout: str # max time including the wait time
-  failFast: true
-  volumes:
-  templates:
-    activeDeadlineSeconds: # override
-    nodeSelector: # override
-    retryStrategy: # override
-    tolerations: # override
-    container:
-      command:
-      env:
-      image:
-      imagePullPolicy:
-      volumeMounts:
-      resources:
-        limits:
-        requests:
-    dag:
-      tasks:
-      depends:
-      continueOn:
-  tolerations: # global level for all templates
-    effect: str
-    key: str
-    operator: str
-    value: str
-  volumes: