ob-metaflow-stubs 5.8.1__py2.py3-none-any.whl → 5.9__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. metaflow-stubs/__init__.pyi +461 -457
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +27 -25
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +6 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -3
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  42. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  44. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -3
  51. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  55. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  56. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  57. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  58. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/cards/card_cli.pyi +2 -2
  60. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  61. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  64. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  72. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  73. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  74. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  75. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  76. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  79. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  81. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  82. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  83. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  84. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  87. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  89. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +18 -0
  95. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +7 -3
  96. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +26 -4
  97. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +8 -2
  98. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  99. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  100. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  101. metaflow-stubs/plugins/package_cli.pyi +2 -2
  102. metaflow-stubs/plugins/parallel_decorator.pyi +5 -5
  103. metaflow-stubs/plugins/perimeters.pyi +2 -2
  104. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  105. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  108. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  111. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  115. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  117. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  118. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  120. metaflow-stubs/procpoll.pyi +2 -2
  121. metaflow-stubs/profilers/__init__.pyi +2 -2
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +8 -6
  125. metaflow-stubs/runner/metaflow_runner.pyi +7 -7
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +4 -4
  129. metaflow-stubs/runner/utils.pyi +5 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +3 -3
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. {ob_metaflow_stubs-5.8.1.dist-info → ob_metaflow_stubs-5.9.dist-info}/METADATA +1 -1
  136. ob_metaflow_stubs-5.9.dist-info/RECORD +139 -0
  137. ob_metaflow_stubs-5.8.1.dist-info/RECORD +0 -138
  138. {ob_metaflow_stubs-5.8.1.dist-info → ob_metaflow_stubs-5.9.dist-info}/WHEEL +0 -0
  139. {ob_metaflow_stubs-5.8.1.dist-info → ob_metaflow_stubs-5.9.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.19.1+ob(v1) #
- # Generated on 2024-09-06T08:14:24.914849 #
+ # MF version: 2.12.20.1+ob(v1) #
+ # Generated on 2024-09-16T17:51:44.698904 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
+ import metaflow._vendor.click.types
+ import metaflow.datastore.inputs
  import metaflow.parameters
  import typing
- import datetime
  import metaflow.runner.metaflow_runner
- import metaflow.datastore.inputs
- import metaflow.flowspec
+ import metaflow.metaflow_current
  import metaflow.events
+ import datetime
  import metaflow.client.core
- import metaflow._vendor.click.types
- import metaflow.metaflow_current
+ import metaflow.flowspec
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -437,136 +437,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -619,88 +489,79 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies the resources needed when executing this step.

- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

@@ -736,202 +597,277 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
+ Specifies that the step will success under all circumstances.

- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies that the step will success under all circumstances.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

- @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
  memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that the step will success under all circumstances.
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies that the step will success under all circumstances.
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Internal decorator to support Fast bakery
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

@@ -993,100 +929,112 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
993
929
  ...
994
930
 
995
931
  @typing.overload
996
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
932
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
997
933
  """
998
- Specifies the times when the flow should be run when running on a
999
- production scheduler.
1000
-
1001
- Parameters
1002
- ----------
1003
- hourly : bool, default False
1004
- Run the workflow hourly.
1005
- daily : bool, default True
1006
- Run the workflow daily.
1007
- weekly : bool, default False
1008
- Run the workflow weekly.
1009
- cron : str, optional, default None
1010
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1011
- specified by this expression.
1012
- timezone : str, optional, default None
1013
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1014
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
934
+ Decorator prototype for all step decorators. This function gets specialized
935
+ and imported for all decorators types by _import_plugin_decorators().
1015
936
  """
1016
937
  ...
1017
938
 
1018
939
  @typing.overload
1019
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
940
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1020
941
  ...
1021
942
 
1022
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
943
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1023
944
  """
1024
- Specifies the times when the flow should be run when running on a
1025
- production scheduler.
945
+ Decorator prototype for all step decorators. This function gets specialized
946
+ and imported for all decorators types by _import_plugin_decorators().
947
+ """
948
+ ...
949
+
950
+ @typing.overload
951
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
952
+ """
953
+ Specifies the PyPI packages for the step.
954
+
955
+ Information in this decorator will augment any
956
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
957
+ you can use `@pypi_base` to set packages required by all
958
+ steps and use `@pypi` to specify step-specific overrides.
1026
959
 
1027
960
  Parameters
1028
961
  ----------
1029
- hourly : bool, default False
1030
- Run the workflow hourly.
1031
- daily : bool, default True
1032
- Run the workflow daily.
1033
- weekly : bool, default False
1034
- Run the workflow weekly.
1035
- cron : str, optional, default None
1036
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1037
- specified by this expression.
1038
- timezone : str, optional, default None
1039
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1040
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
962
+ packages : Dict[str, str], default: {}
963
+ Packages to use for this step. The key is the name of the package
964
+ and the value is the version to use.
965
+ python : str, optional, default: None
966
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
967
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1041
968
  """
1042
969
  ...
1043
970
 
1044
971
  @typing.overload
1045
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
972
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
973
+ ...
974
+
975
+ @typing.overload
976
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
977
+ ...
978
+
979
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1046
980
  """
1047
- Specifies the Conda environment for all steps of the flow.
981
+ Specifies the PyPI packages for the step.
1048
982
 
1049
- Use `@conda_base` to set common libraries required by all
1050
- steps and use `@conda` to specify step-specific additions.
983
+ Information in this decorator will augment any
984
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
985
+ you can use `@pypi_base` to set packages required by all
986
+ steps and use `@pypi` to specify step-specific overrides.
1051
987
 
1052
988
  Parameters
1053
989
  ----------
1054
- packages : Dict[str, str], default {}
1055
- Packages to use for this flow. The key is the name of the package
990
+ packages : Dict[str, str], default: {}
991
+ Packages to use for this step. The key is the name of the package
1056
992
  and the value is the version to use.
1057
- libraries : Dict[str, str], default {}
1058
- Supported for backward compatibility. When used with packages, packages will take precedence.
1059
- python : str, optional, default None
993
+ python : str, optional, default: None
1060
994
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1061
995
  that the version used will correspond to the version of the Python interpreter used to start the run.
1062
- disabled : bool, default False
1063
- If set to True, disables Conda.
1064
996
  """
1065
997
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressively longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...
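A hedged sketch of the flow-level sensor declared above, assuming the flow is compiled with `airflow create`; the bucket key, sensor name, and connection settings are placeholders:

    from metaflow import FlowSpec, step, airflow_s3_key_sensor

    @airflow_s3_key_sensor(
        timeout=3600,
        poke_interval=60,
        mode="poke",
        exponential_backoff=True,
        pool=None,
        soft_fail=False,
        name="wait_for_daily_dump",           # placeholder sensor name
        description="Block start until the daily dump lands in S3",
        bucket_key="s3://example-bucket/daily/dump.parquet",  # placeholder key (full s3:// url)
        bucket_name=None,                     # None because bucket_key is a full s3:// url
        wildcard_match=False,
        aws_conn_id=None,
        verify=None,
    )
    class S3SensorFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        S3SensorFlow()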

@@ -1108,8 +1056,39 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
  """
  ...

+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
+
+ User code call
+ -----------
+ @nim(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='managed'
+ )
+
+ Valid backend options
+ ---------------------
+ - 'managed': Outerbounds selects a compute provider based on the model.
+ - 🚧 'dataplane': Run in your account.
+
+ Valid model options
+ ----------------
+ - 'meta/llama3-8b-instruct': 8B parameter model
+ - 'meta/llama3-70b-instruct': 70B parameter model
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+
+ Parameters
+ ----------
+ models: list[NIM]
+ List of NIM containers running models in sidecars.
+ backend: str
+ Compute provider to run the NIM container.
+ """
+ ...
+
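As a companion to the snippet in the docstring above, a hedged sketch of `@nim` attached to a whole flow; how the sidecar endpoint is queried inside a step is deliberately left abstract, since that client API is not part of this diff:

    from metaflow import FlowSpec, step, nim

    @nim(models=["meta/llama3-8b-instruct"], backend="managed")
    class NimFlow(FlowSpec):

        @step
        def start(self):
            # The NIM container runs as a sidecar next to this task; calls to the
            # served model would go through the client Outerbounds provides (omitted here).
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        NimFlow()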
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies the flow(s) that this flow depends on.

@@ -1162,7 +1141,7 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
  Specifies the flow(s) that this flow depends on.

@@ -1211,48 +1190,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
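A brief, hedged sketch of `@trigger_on_finish`, whose `flow` annotation is reordered above; `UpstreamFlow` is a placeholder flow name:

    from metaflow import FlowSpec, step, trigger_on_finish

    @trigger_on_finish(flow="UpstreamFlow")  # or flows=["FlowA", "FlowB"] to wait on several
    class DownstreamFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        DownstreamFlow()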

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1292,37 +1229,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...
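To complement the step-level `@pypi` sketch earlier, a hedged example of the flow-level `@pypi_base` overload shown above; the pinned versions are illustrative:

    from metaflow import FlowSpec, step, pypi_base

    @pypi_base(packages={"requests": "2.32.3"}, python="3.11.5")  # shared by every step
    class PypiBaseFlow(FlowSpec):

        @step
        def start(self):
            import requests  # resolved from the flow-level PyPI environment
            self.status = requests.get("https://example.com").status_code
            self.next(self.end)

        @step
        def end(self):
            print("status:", self.status)

    if __name__ == "__main__":
        PypiBaseFlow()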

- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
-
- User code call
- -----------
- @nim(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
- )
-
- Valid backend options
- ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
- - 🚧 'dataplane': Run in your account.
-
- Valid model options
- ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
-
- Parameters
- ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
- backend: str
- Compute provider to run the NIM container.
- """
- ...
-
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1365,6 +1271,55 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
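A hedged sketch of `@airflow_external_task_sensor` based on the signature above; the DAG id, task id, and timing values are placeholders:

    import datetime

    from metaflow import FlowSpec, step, airflow_external_task_sensor

    @airflow_external_task_sensor(
        timeout=3600,
        poke_interval=60,
        mode="poke",
        exponential_backoff=True,
        pool=None,
        soft_fail=False,
        name="wait_for_upstream_dag",          # placeholder sensor name
        description="Block start until the upstream Airflow task succeeds",
        external_dag_id="upstream_dag",        # placeholder DAG id
        external_task_ids=["export_table"],    # placeholder task ids
        allowed_states=["success"],
        failed_states=["failed"],
        execution_delta=datetime.timedelta(hours=1),
        check_existence=True,
    )
    class ExternalTaskSensorFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ExternalTaskSensorFlow()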

+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
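A minimal, hedged sketch of the `@conda_base` decorator whose stubs are re-added above; the package pin and Python version are illustrative:

    from metaflow import FlowSpec, step, conda_base

    @conda_base(packages={"numpy": "1.26.4"}, python="3.10.12")  # applies to every step
    class CondaBaseFlow(FlowSpec):

        @step
        def start(self):
            import numpy as np  # resolved from the flow-level Conda environment
            self.total = float(np.arange(10).sum())
            self.next(self.end)

        @step
        def end(self):
            print("total:", self.total)

    if __name__ == "__main__":
        CondaBaseFlow()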
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1460,6 +1415,55 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
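A short, hedged sketch of `@trigger` (its parameter docs are elided in this hunk); the event name is a placeholder:

    from metaflow import FlowSpec, step, trigger

    @trigger(event="data_updated")  # placeholder event name
    class EventDrivenFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        EventDrivenFlow()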

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
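A hedged sketch of the `@schedule` stubs added above, using a cron expression rather than the boolean shortcuts; the expression and timezone are illustrative:

    from metaflow import FlowSpec, step, schedule

    @schedule(cron="0 6 * * *", timezone="America/Los_Angeles")  # 06:00 daily; timezone applies on Argo Workflows
    class NightlyFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        NightlyFlow()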
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.