runnable-0.22.0-py3-none-any.whl → runnable-0.24.0-py3-none-any.whl

extensions/job_executor/__init__.py CHANGED
@@ -2,7 +2,7 @@ import logging
 import os
 from typing import Dict, List, Optional
 
-from runnable import context, defaults, exceptions, parameters, utils
+from runnable import context, defaults, exceptions, parameters, task_console, utils
 from runnable.datastore import DataCatalog, JobLog, JsonParameter
 from runnable.executor import BaseJobExecutor
 
@@ -158,3 +158,17 @@ class GenericJobExecutor(BaseJobExecutor):
             data_catalogs.extend(data_catalog)
 
         return data_catalogs
+
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)
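
The new method above is the whole log-capture lifecycle in one place: flush the task console to a file, put that file into the run's catalog, then delete the local copy. A minimal sketch of the same flow, using hypothetical stand-ins for the console and the catalog handler (only the save_text/put/remove sequence mirrors the diff):

    import os

    class StubConsole:
        def __init__(self) -> None:
            self.lines = ["task output line"]

        def save_text(self, path: str) -> None:
            # Persist whatever was captured on the console to a file.
            with open(path, "w") as handle:
                handle.write("\n".join(self.lines))

    class StubCatalog:
        def put(self, name: str, run_id: str) -> None:
            print(f"cataloged {name} for run {run_id}")

    def add_task_log_to_catalog(name: str, run_id: str) -> None:
        console, catalog = StubConsole(), StubCatalog()
        log_file_name = f"{name}.execution.log"  # stands in for utils.make_log_file_name
        console.save_text(log_file_name)         # flush captured output to disk
        catalog.put(name=log_file_name, run_id=run_id)
        os.remove(log_file_name)                 # local copy is no longer needed

    add_task_log_to_catalog("job", run_id="demo-run")
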
extensions/pipeline_executor/__init__.py CHANGED
@@ -210,6 +210,20 @@ class GenericPipelineExecutor(BasePipelineExecutor):
         """
         return int(os.environ.get(defaults.ATTEMPT_NUMBER, 1))
 
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Dict[str, str | int | float] | None = None
+    ):
+        log_file_name = utils.make_log_file_name(
+            name=name,
+            map_variable=map_variable,
+        )
+        task_console.save_text(log_file_name)
+        # Put the log file in the catalog
+        self._context.catalog_handler.put(
+            name=log_file_name, run_id=self._context.run_id
+        )
+        os.remove(log_file_name)
+
     def _execute_node(
         self,
         node: BaseNode,
@@ -336,14 +350,6 @@ class GenericPipelineExecutor(BasePipelineExecutor):
         )
         self.trigger_node_execution(node=node, map_variable=map_variable, **kwargs)
 
-        log_file_name = utils.make_log_file_name(node=node, map_variable=map_variable)
-        task_console.save_text(log_file_name, clear=True)
-
-        self._context.catalog_handler.put(
-            name=log_file_name, run_id=self._context.run_id
-        )
-        os.remove(log_file_name)
-
     def trigger_node_execution(
         self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs
     ):
@@ -482,6 +488,11 @@ class GenericPipelineExecutor(BasePipelineExecutor):
             console.print(e, style=defaults.error_style)
             logger.exception(e)
             raise
+        finally:
+            # Add task log to the catalog
+            self.add_task_log_to_catalog(
+                name=working_on.internal_name, map_variable=map_variable
+            )
 
         console.rule(style="[dark orange]")
 
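
Moving the capture into a finally: block guarantees the log reaches the catalog whether the node succeeds or raises. A small illustration of that guarantee, with a hypothetical capture list standing in for add_task_log_to_catalog:

    captured: list[str] = []

    def run_node(name: str, fail: bool) -> None:
        try:
            if fail:
                raise RuntimeError("task failed")
        finally:
            captured.append(name)  # runs on success and on failure alike

    run_node("step-a", fail=False)
    try:
        run_node("step-b", fail=True)
    except RuntimeError:
        pass
    assert captured == ["step-a", "step-b"]
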
extensions/pipeline_executor/argo.py CHANGED
@@ -6,7 +6,7 @@ import string
 from collections import namedtuple
 from enum import Enum
 from functools import cached_property
-from typing import Annotated, Literal, Optional
+from typing import Annotated, Any, Literal, Optional, cast
 
 from pydantic import (
     BaseModel,
@@ -20,7 +20,13 @@ from pydantic import (
 from pydantic.alias_generators import to_camel
 from ruamel.yaml import YAML
 
-from extensions.nodes.nodes import MapNode, ParallelNode, TaskNode
+from extensions.nodes.nodes import (
+    MapNode,
+    ParallelNode,
+    StubNode,
+    SuccessNode,
+    TaskNode,
+)
 from extensions.pipeline_executor import GenericPipelineExecutor
 from runnable import defaults, utils
 from runnable.defaults import TypeMapVariable
@@ -229,9 +235,8 @@ class Resources(BaseModel):
     requests: Request = Field(default=Request(), serialization_alias="requests")
 
 
-# This is what the user can override per template
-# Some are specific to container and some are specific to dag
-class TemplateDefaults(BaseModelWIthConfig):
+# Let's construct this from UserDefaults
+class ArgoTemplateDefaults(BaseModelWIthConfig):
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     fail_fast: bool = Field(default=True)
     node_selector: dict[str, str] = Field(default_factory=dict)
@@ -240,13 +245,34 @@ class TemplateDefaults(BaseModelWIthConfig):
     timeout: Optional[str] = Field(default=None)
     tolerations: Optional[list[Toleration]] = Field(default=None)
 
-    # These are in addition to what argo spec provides
-    image: str
-    image_pull_policy: Optional[ImagePullPolicy] = Field(default=ImagePullPolicy.Always)
+    model_config = ConfigDict(
+        extra="ignore",
+    )
+
+
+class CommonDefaults(BaseModelWIthConfig):
+    active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
+    fail_fast: bool = Field(default=True)
+    node_selector: dict[str, str] = Field(default_factory=dict)
+    parallelism: Optional[int] = Field(default=None)
+    retry_strategy: Optional[RetryStrategy] = Field(default=None)
+    timeout: Optional[str] = Field(default=None)
+    tolerations: Optional[list[Toleration]] = Field(default=None)
+    image_pull_policy: ImagePullPolicy = Field(default=ImagePullPolicy.Always)
     resources: Resources = Field(default_factory=Resources)
     env: list[EnvVar | SecretEnvVar] = Field(default_factory=list, exclude=True)
 
 
+# The user provided defaults at the top level
+class UserDefaults(CommonDefaults):
+    image: str
+
+
+# Overrides need not have image
+class Overrides(CommonDefaults):
+    image: Optional[str] = Field(default=None)
+
+
 # User provides this as part of the argoSpec
 # some can be provided here or as a template default or node override
 class ArgoWorkflowSpec(BaseModelWIthConfig):
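
The old TemplateDefaults mixed Argo-spec fields with runnable-only ones (image, image_pull_policy); the split above keeps the shared knobs in CommonDefaults, requires image on the top-level UserDefaults, and leaves it optional on Overrides. A trimmed pydantic sketch of the same shape (field set reduced for brevity):

    from typing import Optional
    from pydantic import BaseModel, Field

    class CommonDefaults(BaseModel):
        fail_fast: bool = Field(default=True)
        parallelism: Optional[int] = Field(default=None)

    class UserDefaults(CommonDefaults):
        image: str  # mandatory when declaring executor-wide defaults

    class Overrides(CommonDefaults):
        image: Optional[str] = Field(default=None)  # optional per override

    defaults = UserDefaults(image="app:1.0")
    overrides = {"high-memory": Overrides(parallelism=1)}  # no image required
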
@@ -258,8 +284,8 @@ class ArgoWorkflowSpec(BaseModelWIthConfig):
     pod_gc: Optional[PodGC] = Field(default=None, serialization_alias="podGC")
     retry_strategy: Optional[RetryStrategy] = Field(default=None)
     service_account_name: Optional[str] = Field(default=None)
-    template_defaults: TemplateDefaults
     tolerations: Optional[list[Toleration]] = Field(default=None)
+    template_defaults: Optional[ArgoTemplateDefaults] = Field(default=None)
 
 
 class ArgoMetadata(BaseModelWIthConfig):
@@ -321,7 +347,6 @@ class ContainerTemplate((BaseModelWIthConfig)):
     inputs: Optional[Inputs] = Field(default=None)
     outputs: Optional[Outputs] = Field(default=None)
 
-    # The remaining can be from template defaults or node overrides
     active_deadline_seconds: Optional[int] = Field(default=86400)  # 1 day
     metadata: Optional[PodMetaData] = Field(default=None)
     node_selector: dict[str, str] = Field(default_factory=dict)
@@ -356,17 +381,10 @@ class ArgoExecutor(GenericPipelineExecutor):
         from_attributes=True,
         use_enum_values=True,
     )
-
-    argo_workflow: ArgoWorkflow
-
-    # Lets use a generic one
     pvc_for_runnable: Optional[str] = Field(default=None)
-    # pvc_for_catalog: Optional[str] = Field(default=None)
-    # pvc_for_run_log: Optional[str] = Field(default=None)
     custom_volumes: Optional[list[CustomVolume]] = Field(
         default_factory=list[CustomVolume]
     )
-    env: list[EnvVar] = Field(default_factory=list[EnvVar])
 
     expose_parameters_as_inputs: bool = True
     secret_from_k8s: Optional[str] = Field(default=None)
@@ -375,6 +393,11 @@ class ArgoExecutor(GenericPipelineExecutor):
         default="INFO"
     )
 
+    defaults: UserDefaults
+    argo_workflow: ArgoWorkflow
+
+    overrides: dict[str, Overrides] = Field(default_factory=dict)
+
     # This should be used when we refer to run_id or log_level in the containers
     _run_id_as_parameter: str = PrivateAttr(default="{{workflow.parameters.run_id}}")
     _log_level_as_parameter: str = PrivateAttr(
@@ -388,6 +411,11 @@ class ArgoExecutor(GenericPipelineExecutor):
     _container_catalog_location: str = PrivateAttr(default="/tmp/catalog/")
     _added_initial_container: bool = PrivateAttr(default=False)
 
+    def model_post_init(self, __context: Any) -> None:
+        self.argo_workflow.spec.template_defaults = ArgoTemplateDefaults(
+            **self.defaults.model_dump()
+        )
+
     def sanitize_name(self, name: str) -> str:
         formatted_name = name.replace(" ", "-").replace(".", "-").replace("_", "-")
         tag = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
@@ -425,8 +453,6 @@ class ArgoExecutor(GenericPipelineExecutor):
         parameters: Optional[list[Parameter]],
         task_name: str,
     ):
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
-
         map_variable: TypeMapVariable = {}
         for parameter in parameters or []:
             map_variable[parameter.name] = (  # type: ignore
@@ -442,8 +468,8 @@ class ArgoExecutor(GenericPipelineExecutor):
 
         core_container_template = CoreContainerTemplate(
             command=shlex.split(fan_command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=self.defaults.image,
+            image_pull_policy=self.defaults.image_pull_policy,
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],
@@ -459,12 +485,17 @@ class ArgoExecutor(GenericPipelineExecutor):
             outputs = Outputs(parameters=[OutputParameter(name="iterate-on")])
 
         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
-            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
+            container=core_container_template,
             inputs=Inputs(parameters=parameters),
             outputs=outputs,
-            **template_defaults,
+            active_deadline_seconds=self.defaults.active_deadline_seconds,
+            node_selector=self.defaults.node_selector,
+            parallelism=self.defaults.parallelism,
+            retry_strategy=self.defaults.retry_strategy,
+            timeout=self.defaults.timeout,
+            tolerations=self.defaults.tolerations,
+            volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
         )
 
         self._templates.append(container_template)
@@ -475,14 +506,23 @@ class ArgoExecutor(GenericPipelineExecutor):
         task_name: str,
         inputs: Optional[Inputs] = None,
     ) -> ContainerTemplate:
-        template_defaults = self.argo_workflow.spec.template_defaults.model_dump()
+        assert (
+            isinstance(node, TaskNode)
+            or isinstance(node, StubNode)
+            or isinstance(node, SuccessNode)
+        )
 
-        node_overide = {}
-        if hasattr(node, "overides"):
-            node_overide = node.overides
+        node_override = None
+        if hasattr(node, "overrides"):
+            override_key = node.overrides.get(self.service_name, "")
+            try:
+                node_override = self.overrides.get(override_key)
+            except:  # noqa
+                raise Exception("Override not found for: ", override_key)
 
-        # update template defaults with node overrides
-        template_defaults.update(node_overide)
+        effective_settings = self.defaults.model_dump()
+        if node_override:
+            effective_settings.update(node_override.model_dump())
 
         inputs = inputs or Inputs(parameters=[])
 
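
Override resolution is now a plain dict merge: start from the executor-wide defaults, then overlay whichever named override the node points at. A minimal sketch of that precedence (unlike the diff, the override here carries only the keys it changes, so unset values cannot clobber the defaults):

    defaults = {"image": "app:1.0", "parallelism": None, "fail_fast": True}
    overrides = {"high-memory": {"parallelism": 2}}

    def effective_settings(override_key: str) -> dict:
        settings = dict(defaults)                  # executor-wide baseline
        node_override = overrides.get(override_key)
        if node_override:
            settings.update(node_override)         # node-specific values win
        return settings

    assert effective_settings("high-memory")["parallelism"] == 2
    assert effective_settings("")["image"] == "app:1.0"
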
@@ -502,8 +542,9 @@ class ArgoExecutor(GenericPipelineExecutor):
 
         core_container_template = CoreContainerTemplate(
             command=shlex.split(command),
-            image=template_defaults["image"],
-            image_pull_policy=template_defaults["image_pull_policy"],
+            image=effective_settings["image"],
+            image_pull_policy=effective_settings["image_pull_policy"],
+            resources=effective_settings["resources"],
             volume_mounts=[
                 volume_pair.volume_mount for volume_pair in self.volume_pairs
             ],
@@ -516,15 +557,15 @@ class ArgoExecutor(GenericPipelineExecutor):
         self._set_env_vars_to_task(node, core_container_template)
 
         container_template = ContainerTemplate(
-            container=core_container_template,
             name=task_name,
+            container=core_container_template,
             inputs=Inputs(
                 parameters=[
                     Parameter(name=param.name) for param in inputs.parameters or []
                 ]
            ),
             volumes=[volume_pair.volume for volume_pair in self.volume_pairs],
-            **template_defaults,
+            **node_override.model_dump() if node_override else {},
         )
 
         return container_template
@@ -534,16 +575,22 @@ class ArgoExecutor(GenericPipelineExecutor):
     ):
         if not isinstance(working_on, TaskNode):
             return
+
         global_envs: dict[str, str] = {}
 
-        for env_var in self.env:
+        for env_var in self.defaults.env:
+            env_var = cast(EnvVar, env_var)
             global_envs[env_var.name] = env_var.value
 
-        node_overide = {}
-        if hasattr(working_on, "overides"):
-            node_overide = working_on.overides
+        override_key = working_on.overrides.get(self.service_name, "")
+        node_override = self.overrides.get(override_key, None)
+
+        # Update the global envs with the node overrides
+        if node_override:
+            for env_var in node_override.env:
+                env_var = cast(EnvVar, env_var)
+                global_envs[env_var.name] = env_var.value
 
-        global_envs.update(node_overide.get("env", {}))
         for key, value in global_envs.items():
             env_var_to_add = EnvVar(name=key, value=value)
             container_template.env.append(env_var_to_add)
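
Environment variables follow the same last-writer-wins rule: the defaults-level env is applied first and the override's env replaces any duplicates. A compact sketch:

    def merged_env(default_env: dict, override_env: dict) -> dict:
        merged: dict = {}
        merged.update(default_env)   # executor defaults first
        merged.update(override_env)  # override values replace duplicates
        return merged

    assert merged_env({"LOG_LEVEL": "INFO"}, {"LOG_LEVEL": "DEBUG"}) == {"LOG_LEVEL": "DEBUG"}
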
@@ -796,11 +843,6 @@ class ArgoExecutor(GenericPipelineExecutor):
 
         argo_workflow_dump = self.argo_workflow.model_dump(
             by_alias=True,
-            exclude={
-                "spec": {
-                    "template_defaults": {"image_pull_policy", "image", "resources"}
-                }
-            },
             exclude_none=True,
             round_trip=False,
         )
runnable/cli.py CHANGED
@@ -64,8 +64,9 @@ def execute(
     tag: Annotated[str, typer.Option(help="A tag attached to the run")] = "",
     run_id: Annotated[
         str,
-        typer.Option(
-            help="An optional run_id, one would be generated if its not provided"
+        typer.Argument(
+            envvar="RUNNABLE_RUN_ID",
+            help="An optional run_id, one would be generated if its not provided",
         ),
     ] = "",
 ):
@@ -282,8 +283,11 @@ def execute_job(
     ],
     run_id: Annotated[
         str,
-        typer.Argument(help="An run_id, one would be generated if its not provided"),
-    ],
+        typer.Argument(
+            envvar="RUNNABLE_RUN_ID",
+            help="An optional run_id, one would be generated if its not provided",
+        ),
+    ] = "",
     config_file: Annotated[
         str,
         typer.Option(
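
With envvar="RUNNABLE_RUN_ID", typer fills run_id from the command line when given and falls back to the environment variable otherwise, which is how a scheduler can pin the run id without touching the command. A minimal runnable sketch of the same pattern (a hypothetical app, not the real CLI):

    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def execute(
        run_id: Annotated[
            str,
            typer.Argument(envvar="RUNNABLE_RUN_ID", help="Falls back to $RUNNABLE_RUN_ID"),
        ] = "",
    ):
        typer.echo(f"run_id={run_id or '<to be generated>'}")

    if __name__ == "__main__":
        app()

Running it as RUNNABLE_RUN_ID=my-run python app.py prints run_id=my-run; an explicit argument still wins over the environment.
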
runnable/entrypoints.py CHANGED
@@ -345,15 +345,9 @@ def execute_single_node(
     try:
         executor.execute_node(node=node_to_execute, map_variable=map_variable_dict)
     finally:
-        log_file_name = utils.make_log_file_name(
-            node=node_to_execute,
-            map_variable=map_variable_dict,
+        run_context.executor.add_task_log_to_catalog(
+            name=node_to_execute.internal_name, map_variable=map_variable_dict
         )
-        task_console.save_text(log_file_name)
-
-        # Put the log file in the catalog
-        run_context.catalog_handler.put(name=log_file_name, run_id=run_context.run_id)
-        os.remove(log_file_name)
 
     executor.send_return_code()
 
@@ -408,7 +402,10 @@ def execute_job_yaml_spec(
     )
 
     assert isinstance(executor, BaseJobExecutor)
-    executor.submit_job(job, catalog_settings=catalog_config)
+    try:
+        executor.submit_job(job, catalog_settings=catalog_config)
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")
 
     executor.send_return_code()
 
@@ -483,9 +480,12 @@ def execute_job_non_local(
         "Executing the job from the user. We are still in the caller's compute environment"
     )
 
-    run_context.executor.execute_job(
-        run_context.job, catalog_settings=run_context.job_catalog_settings
-    )
+    try:
+        run_context.executor.execute_job(
+            run_context.job, catalog_settings=run_context.job_catalog_settings
+        )
+    finally:
+        run_context.executor.add_task_log_to_catalog("job")
 
     run_context.executor.send_return_code()
 
runnable/executor.py CHANGED
@@ -84,6 +84,11 @@ class BaseExecutor(ABC, BaseModel):
         """
         ...
 
+    @abstractmethod
+    def add_task_log_to_catalog(
+        self, name: str, map_variable: Optional[TypeMapVariable] = None
+    ): ...
+
 
 class BaseJobExecutor(BaseExecutor):
     service_type: str = "job_executor"
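
Promoting add_task_log_to_catalog to an @abstractmethod on BaseExecutor is what lets entrypoints.py call it on run_context.executor without caring which executor is configured; every concrete executor must now implement it. A small sketch of the contract:

    from abc import ABC, abstractmethod
    from typing import Optional

    class BaseExecutor(ABC):
        @abstractmethod
        def add_task_log_to_catalog(self, name: str, map_variable: Optional[dict] = None): ...

    class LocalExecutor(BaseExecutor):
        def add_task_log_to_catalog(self, name: str, map_variable: Optional[dict] = None):
            print(f"cataloging log for {name}")

    LocalExecutor().add_task_log_to_catalog("job")  # ok
    # BaseExecutor() would raise TypeError: can't instantiate abstract class
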
runnable/sdk.py CHANGED
@@ -899,7 +899,10 @@ class BaseJob(BaseModel):
         job = self.get_task()
         catalog_settings = self.return_catalog_settings()
 
-        run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        try:
+            run_context.executor.submit_job(job, catalog_settings=catalog_settings)
+        finally:
+            run_context.executor.add_task_log_to_catalog("job")
 
         logger.info(
             "Executing the job from the user. We are still in the caller's compute environment"
runnable/utils.py CHANGED
@@ -77,7 +77,11 @@ def generate_run_id(run_id: str = "") -> str:
     Returns:
         str: A generated run_id
     """
-    # If we are not provided with a run_id, generate one
+    # If we are not provided with a run_id, check env var
+    if not run_id:
+        run_id = os.environ.get(defaults.ENV_RUN_ID, "")
+
+    # If both are not given, generate one
     if not run_id:
         now = datetime.now()
         run_id = f"{names.get_random_name()}-{now.hour:02}{now.minute:02}"
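
Together with the CLI change, run id resolution now has three tiers: an explicit argument wins, then the environment variable (defaults.ENV_RUN_ID; presumably "RUNNABLE_RUN_ID", matching the envvar in cli.py), and only then a generated name. A sketch of that precedence with a stand-in generator:

    import os
    from datetime import datetime

    ENV_RUN_ID = "RUNNABLE_RUN_ID"  # assumed value of defaults.ENV_RUN_ID

    def generate_run_id(run_id: str = "") -> str:
        if not run_id:                               # 1. explicit argument wins
            run_id = os.environ.get(ENV_RUN_ID, "")  # 2. then the environment
        if not run_id:                               # 3. finally, generate one
            now = datetime.now()
            run_id = f"run-{now.hour:02}{now.minute:02}"
        return run_id

    os.environ[ENV_RUN_ID] = "from-env"
    assert generate_run_id("explicit") == "explicit"
    assert generate_run_id() == "from-env"
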
@@ -628,9 +632,9 @@ def gather_variables() -> Dict[str, str]:
     return variables
 
 
-def make_log_file_name(node: BaseNode, map_variable: TypeMapVariable) -> str:
+def make_log_file_name(name: str, map_variable: TypeMapVariable) -> str:
     random_tag = "".join(random.choices(string.ascii_uppercase + string.digits, k=3))
-    log_file_name = node.name
+    log_file_name = name
 
     if map_variable:
         for _, value in map_variable.items():
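
Taking a plain name instead of a BaseNode is what allows the job executors, which have no node at all, to reuse this helper with "job". A sketch of the new shape; the suffix and tag here are illustrative, not the library's exact format:

    def make_log_file_name(name: str, map_variable: dict | None = None, tag: str = "X7Q") -> str:
        log_file_name = name
        if map_variable:
            for value in map_variable.values():
                log_file_name += "_" + str(value)  # one suffix per map iteration value
        return f"{log_file_name}_{tag}.log"

    assert make_log_file_name("step", {"chunk": 3}) == "step_3_X7Q.log"
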
runnable-0.22.0.dist-info/METADATA → runnable-0.24.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.22.0
+Version: 0.24.0
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE
runnable-0.22.0.dist-info/RECORD → runnable-0.24.0.dist-info/RECORD RENAMED
@@ -4,7 +4,7 @@ extensions/catalog/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 extensions/catalog/file_system.py,sha256=VZEUx4X-GDSM8rJ_2kiCOyw1eek3roN0CiSB8wdUcOA,9307
 extensions/catalog/pyproject.toml,sha256=lLNxY6v04c8I5QK_zKw_E6sJTArSJRA_V-79ktaA3Hk,279
 extensions/job_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-extensions/job_executor/__init__.py,sha256=HINaPjBWz04Ni7GqhuDLi0lS0-gYzq52HcOioYueYJE,5513
+extensions/job_executor/__init__.py,sha256=3zS2m6dg-L6SkKfL0kr4AxVUVmVJcepV6eipyMvQR6s,6006
 extensions/job_executor/k8s.py,sha256=V5k6Rnf_sAFqptVbCrWs_x5sl3x3fSHwO96IZoiJxKU,15342
 extensions/job_executor/k8s_job_spec.yaml,sha256=7aFpxHdO_p6Hkc3YxusUOuAQTD1Myu0yTPX9DrhxbOg,1158
 extensions/job_executor/local.py,sha256=FvxTk0vyxdrbLOAyNkLyjvmmowypabWOSITQBK_ffVE,1907
@@ -14,8 +14,8 @@ extensions/nodes/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/nodes/nodes.py,sha256=ib68QE737ihGLIVp3V2wea13u7lmMZdRvK80bgUkRtA,34645
 extensions/nodes/pyproject.toml,sha256=YTu-ETN3JNFSkMzzWeOwn4m-O2nbRH-PmiPBALDCUw4,278
 extensions/pipeline_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-extensions/pipeline_executor/__init__.py,sha256=YnKILiy-SxfnG3rYUoinjh1lfkuAF5QXpPePtn6VxBY,25174
-extensions/pipeline_executor/argo.py,sha256=81Lv8WO2riYo4KdtgpRabl5YtSgrNGGq8PfMo6YLKUY,33154
+extensions/pipeline_executor/__init__.py,sha256=bobyC4BWmDKCnMQsuyj9buQX7tZOFxuwU3Coq9-QgR0,25568
+extensions/pipeline_executor/argo.py,sha256=nnlR_D6arQMUSgAevnW1RXeN48SoB1wVcEfQ4TBireY,34543
 extensions/pipeline_executor/local.py,sha256=H8s6AdML_9_f-vdGG_6k0y9FbLqAqvA1S_7xMNyARzY,1946
 extensions/pipeline_executor/local_container.py,sha256=HOT9I-cPDCvgy6_bzNEtl4jPhTyeYSn1GK7lplH3vDA,12515
 extensions/pipeline_executor/mocked.py,sha256=SuObJ6Myt7p8duW8sylIp1cYIAnFutsJW1avWaOUY3c,5798
@@ -34,24 +34,24 @@ extensions/secrets/dotenv.py,sha256=FbYYd_pVuJuVuIDIvXbzKuSSQ9GPq7xJXTDbJMTQbhM,
 extensions/secrets/pyproject.toml,sha256=mLJNImNcBlbLKHh-0ugVWT9V83R4RibyyYDtBCSqVF4,282
 runnable/__init__.py,sha256=n14AnTUUEYxXlTJ6-YLT0tMmeFb7Co_3kNldV6pgKSs,662
 runnable/catalog.py,sha256=b9N40kTv1IBidzlWjkHcBGyYhq6qIDHZfBuFenzjsMI,4924
-runnable/cli.py,sha256=xHb2VSC16z9IT45SnhUYHuXUrzPqF_pK-jyWiTnz5sM,8670
+runnable/cli.py,sha256=4bEqXXh1sdkgJo5_2S9Sy260WbLoM2hofRnpeN4cfaA,8786
 runnable/context.py,sha256=by5uepmuCP0dmM9BmsliXihSes5QEFejwAsmekcqylE,1388
 runnable/datastore.py,sha256=9y5enzn6AXLHLdwvgkdjGPrBkVlrcjfbaAHsst-lJzg,32466
 runnable/defaults.py,sha256=3o9IVGryyCE6PoQTOoaIaHHTbJGEzmdXMcwzOhwAYoI,3518
-runnable/entrypoints.py,sha256=5boTvQExAVwujLW-Lmic6pjGY122qwZB8ZnaKlV8Fac,19006
+runnable/entrypoints.py,sha256=xkUa568-7x9xALz13qW14DxS1nnLDKwLwdIBJZG-vM0,18982
 runnable/exceptions.py,sha256=LFbp0-Qxg2PAMLEVt7w2whhBxSG-5pzUEv5qN-Rc4_c,3003
-runnable/executor.py,sha256=ZPpfKwjDJnta03M2cWIINXcwke2ZDVc_QrIw7kwpHDQ,15547
+runnable/executor.py,sha256=ne-iRQqGuEmmuApnkBDz1_hokVcjFrbe7BvWqXCG1Ys,15684
 runnable/graph.py,sha256=jVjikRLR-so3b2ufmNKpEQ_Ny68qN4bcGDAdXBRKiCY,16574
 runnable/names.py,sha256=vn92Kv9ANROYSZX6Z4z1v_WA3WiEdIYmG6KEStBFZug,8134
 runnable/nodes.py,sha256=YU9u7r1ESzui1uVtJ1dgwdv1ozyJnF2k-MCFieT8CLI,17519
 runnable/parameters.py,sha256=LyQb1d0SaFeI4PJ_yDYt9wArm9ThSPASWb36TwIdDUs,5213
 runnable/pickler.py,sha256=ydJ_eti_U1F4l-YacFp7BWm6g5vTn04UXye25S1HVok,2684
-runnable/sdk.py,sha256=-FvRoIMlbNFOS_8c2kefYA-mlyE_15rNG3GoN6gKumc,33470
+runnable/sdk.py,sha256=T1nqDpLN9fULvvU9L-oY0EHqYdKUI9qk7oekLynm02Y,33568
 runnable/secrets.py,sha256=PXcEJw-4WPzeWRLfsatcPPyr1zkqgHzdRWRcS9vvpvM,2354
 runnable/tasks.py,sha256=YVKpKxSpsRZWcU3MOqoBoqxeo1XSqv5crkOu6kyu63o,28520
-runnable/utils.py,sha256=HyYo6Xe4O02eHuT_m9oLqzh7dxdhbmtTqKzgFR2bpWE,19712
-runnable-0.22.0.dist-info/METADATA,sha256=KsHHJP5DJOu9S1yv7SEid62OEnSg_eBZfBGbgYUWUiw,9945
-runnable-0.22.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-runnable-0.22.0.dist-info/entry_points.txt,sha256=seek5WVGvwYALm8lZ0TfPXwG5NaCeUKjU8urF8k3gvY,1621
-runnable-0.22.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-runnable-0.22.0.dist-info/RECORD,,
+runnable/utils.py,sha256=hJUfRmIgU20weWPmBOHF22F6svBU0A_0nqifRMuXKs0,19822
+runnable-0.24.0.dist-info/METADATA,sha256=k4SpZ9SjbDI1xtJK6vV3JGJYWPPbRSJnE1mMtA3YJnE,9945
+runnable-0.24.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+runnable-0.24.0.dist-info/entry_points.txt,sha256=seek5WVGvwYALm8lZ0TfPXwG5NaCeUKjU8urF8k3gvY,1621
+runnable-0.24.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+runnable-0.24.0.dist-info/RECORD,,