ob-metaflow-stubs 4.3__py2.py3-none-any.whl → 4.5__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145) hide show
  1. metaflow-stubs/__init__.pyi +635 -604
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +1 -1
  8. metaflow-stubs/events.pyi +1 -1
  9. metaflow-stubs/exception.pyi +1 -1
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +1 -1
  15. metaflow-stubs/metaflow_config.pyi +1 -1
  16. metaflow-stubs/metaflow_current.pyi +16 -16
  17. metaflow-stubs/mflog/mflog.pyi +1 -1
  18. metaflow-stubs/multicore_utils.pyi +1 -1
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +1 -1
  21. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  22. metaflow-stubs/plugins/airflow/airflow.pyi +1 -1
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +1 -1
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  26. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  32. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  33. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  37. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  38. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +1 -1
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +1 -1
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +1 -1
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +1 -1
  56. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  63. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  64. metaflow-stubs/plugins/cards/card_cli.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +1 -1
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +1 -1
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +1 -1
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +1 -1
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +1 -1
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  83. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  84. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  91. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  92. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  93. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  94. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  109. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  110. metaflow-stubs/plugins/package_cli.pyi +1 -1
  111. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  112. metaflow-stubs/plugins/perimeters.pyi +1 -1
  113. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  120. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  121. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  125. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  129. metaflow-stubs/procpoll.pyi +1 -1
  130. metaflow-stubs/profilers/__init__.pyi +1 -1
  131. metaflow-stubs/pylint_wrapper.pyi +1 -1
  132. metaflow-stubs/runner/__init__.pyi +1 -1
  133. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  134. metaflow-stubs/runner/nbrun.pyi +1 -1
  135. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  136. metaflow-stubs/system/__init__.pyi +3 -3
  137. metaflow-stubs/system/system_logger.pyi +2 -2
  138. metaflow-stubs/system/system_monitor.pyi +2 -2
  139. metaflow-stubs/tagging_util.pyi +1 -1
  140. metaflow-stubs/tuple_util.pyi +1 -1
  141. {ob_metaflow_stubs-4.3.dist-info → ob_metaflow_stubs-4.5.dist-info}/METADATA +1 -1
  142. ob_metaflow_stubs-4.5.dist-info/RECORD +145 -0
  143. ob_metaflow_stubs-4.3.dist-info/RECORD +0 -145
  144. {ob_metaflow_stubs-4.3.dist-info → ob_metaflow_stubs-4.5.dist-info}/WHEEL +0 -0
  145. {ob_metaflow_stubs-4.3.dist-info → ob_metaflow_stubs-4.5.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.12.7.1+ob(v1) #
4
- # Generated on 2024-07-09T16:19:31.537205 #
4
+ # Generated on 2024-07-12T00:19:39.735632 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.client.core
12
- import metaflow.metaflow_current
13
- import io
14
- import metaflow.plugins.datatools.s3.s3
15
- import datetime
16
- import metaflow.datastore.inputs
17
11
  import metaflow.flowspec
18
- import metaflow.parameters
19
- import metaflow._vendor.click.types
20
- import metaflow.events
12
+ import metaflow.plugins.datatools.s3.s3
21
13
  import typing
14
+ import metaflow.events
15
+ import metaflow.metaflow_current
16
+ import metaflow.client.core
22
17
  import metaflow.runner.metaflow_runner
18
+ import metaflow._vendor.click.types
19
+ import metaflow.datastore.inputs
20
+ import datetime
21
+ import io
22
+ import metaflow.parameters
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
@@ -728,308 +728,115 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
728
728
  ...
729
729
 
730
730
  @typing.overload
731
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
732
  """
733
- Specifies the Conda environment for the step.
733
+ Specifies that the step will succeed under all circumstances.
734
734
 
735
- Information in this decorator will augment any
736
- attributes set in the `@conda_base` flow-level decorator. Hence,
737
- you can use `@conda_base` to set packages required by all
738
- steps and use `@conda` to specify step-specific overrides.
735
+ The decorator will create an optional artifact, specified by `var`, which
736
+ contains the exception raised. You can use it to detect the presence
737
+ of errors, indicating that all happy-path artifacts produced by the step
738
+ are missing.
739
739
 
740
740
  Parameters
741
741
  ----------
742
- packages : Dict[str, str], default {}
743
- Packages to use for this step. The key is the name of the package
744
- and the value is the version to use.
745
- libraries : Dict[str, str], default {}
746
- Supported for backward compatibility. When used with packages, packages will take precedence.
747
- python : str, optional, default None
748
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
749
- that the version used will correspond to the version of the Python interpreter used to start the run.
750
- disabled : bool, default False
751
- If set to True, disables @conda.
742
+ var : str, optional, default None
743
+ Name of the artifact in which to store the caught exception.
744
+ If not specified, the exception is not stored.
745
+ print_exception : bool, default True
746
+ Determines whether or not the exception is printed to
747
+ stdout when caught.
752
748
  """
753
749
  ...
754
750
 
755
751
  @typing.overload
756
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
752
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
757
753
  ...
758
754
 
759
755
  @typing.overload
760
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
756
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
761
757
  ...
762
758
 
763
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
759
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
764
760
  """
765
- Specifies the Conda environment for the step.
761
+ Specifies that the step will succeed under all circumstances.
766
762
 
767
- Information in this decorator will augment any
768
- attributes set in the `@conda_base` flow-level decorator. Hence,
769
- you can use `@conda_base` to set packages required by all
770
- steps and use `@conda` to specify step-specific overrides.
763
+ The decorator will create an optional artifact, specified by `var`, which
764
+ contains the exception raised. You can use it to detect the presence
765
+ of errors, indicating that all happy-path artifacts produced by the step
766
+ are missing.
771
767
 
772
768
  Parameters
773
769
  ----------
774
- packages : Dict[str, str], default {}
775
- Packages to use for this step. The key is the name of the package
776
- and the value is the version to use.
777
- libraries : Dict[str, str], default {}
778
- Supported for backward compatibility. When used with packages, packages will take precedence.
779
- python : str, optional, default None
780
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
781
- that the version used will correspond to the version of the Python interpreter used to start the run.
782
- disabled : bool, default False
783
- If set to True, disables @conda.
770
+ var : str, optional, default None
771
+ Name of the artifact in which to store the caught exception.
772
+ If not specified, the exception is not stored.
773
+ print_exception : bool, default True
774
+ Determines whether or not the exception is printed to
775
+ stdout when caught.
784
776
  """
785
777
  ...
786
778
 
787
779
  @typing.overload
788
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
780
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
789
781
  """
790
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
782
+ Specifies environment variables to be set prior to the execution of a step.
791
783
 
792
784
  Parameters
793
785
  ----------
794
- cpu : int, default 1
795
- Number of CPUs required for this step. If `@resources` is
796
- also present, the maximum value from all decorators is used.
797
- gpu : int, default 0
798
- Number of GPUs required for this step. If `@resources` is
799
- also present, the maximum value from all decorators is used.
800
- memory : int, default 4096
801
- Memory size (in MB) required for this step. If
802
- `@resources` is also present, the maximum value from all decorators is
803
- used.
804
- image : str, optional, default None
805
- Docker image to use when launching on AWS Batch. If not specified, and
806
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
807
- not, a default Docker image mapping to the current version of Python is used.
808
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
809
- AWS Batch Job Queue to submit the job to.
810
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
811
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
812
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
813
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
814
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
815
- shared_memory : int, optional, default None
816
- The value for the size (in MiB) of the /dev/shm volume for this step.
817
- This parameter maps to the `--shm-size` option in Docker.
818
- max_swap : int, optional, default None
819
- The total amount of swap memory (in MiB) a container can use for this
820
- step. This parameter is translated to the `--memory-swap` option in
821
- Docker where the value is the sum of the container memory plus the
822
- `max_swap` value.
823
- swappiness : int, optional, default None
824
- This allows you to tune memory swappiness behavior for this step.
825
- A swappiness value of 0 causes swapping not to happen unless absolutely
826
- necessary. A swappiness value of 100 causes pages to be swapped very
827
- aggressively. Accepted values are whole numbers between 0 and 100.
828
- use_tmpfs : bool, default False
829
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
830
- not available on Fargate compute environments
831
- tmpfs_tempdir : bool, default True
832
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
833
- tmpfs_size : int, optional, default None
834
- The value for the size (in MiB) of the tmpfs mount for this step.
835
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
836
- memory allocated for this step.
837
- tmpfs_path : str, optional, default None
838
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
839
- inferentia : int, default 0
840
- Number of Inferentia chips required for this step.
841
- trainium : int, default None
842
- Alias for inferentia. Use only one of the two.
843
- efa : int, default 0
844
- Number of elastic fabric adapter network devices to attach to container
845
- ephemeral_storage : int, default None
846
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
847
- This is only relevant for Fargate compute environments
848
- log_driver: str, optional, default None
849
- The log driver to use for the Amazon ECS container.
850
- log_options: List[str], optional, default None
851
- List of strings containing options for the chosen log driver. The configurable values
852
- depend on the `log driver` chosen. Validation of these options is not supported yet.
853
- Example: [`awslogs-group:aws/batch/job`]
786
+ vars : Dict[str, str], default {}
787
+ Dictionary of environment variables to set.
854
788
  """
855
789
  ...
856
790
 
857
791
  @typing.overload
858
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
792
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
859
793
  ...
860
794
 
861
795
  @typing.overload
862
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
796
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
863
797
  ...
864
798
 
865
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
799
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
866
800
  """
867
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
801
+ Specifies environment variables to be set prior to the execution of a step.
868
802
 
869
803
  Parameters
870
804
  ----------
871
- cpu : int, default 1
872
- Number of CPUs required for this step. If `@resources` is
873
- also present, the maximum value from all decorators is used.
874
- gpu : int, default 0
875
- Number of GPUs required for this step. If `@resources` is
876
- also present, the maximum value from all decorators is used.
877
- memory : int, default 4096
878
- Memory size (in MB) required for this step. If
879
- `@resources` is also present, the maximum value from all decorators is
880
- used.
881
- image : str, optional, default None
882
- Docker image to use when launching on AWS Batch. If not specified, and
883
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
884
- not, a default Docker image mapping to the current version of Python is used.
885
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
886
- AWS Batch Job Queue to submit the job to.
887
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
888
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
889
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
890
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
891
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
892
- shared_memory : int, optional, default None
893
- The value for the size (in MiB) of the /dev/shm volume for this step.
894
- This parameter maps to the `--shm-size` option in Docker.
895
- max_swap : int, optional, default None
896
- The total amount of swap memory (in MiB) a container can use for this
897
- step. This parameter is translated to the `--memory-swap` option in
898
- Docker where the value is the sum of the container memory plus the
899
- `max_swap` value.
900
- swappiness : int, optional, default None
901
- This allows you to tune memory swappiness behavior for this step.
902
- A swappiness value of 0 causes swapping not to happen unless absolutely
903
- necessary. A swappiness value of 100 causes pages to be swapped very
904
- aggressively. Accepted values are whole numbers between 0 and 100.
905
- use_tmpfs : bool, default False
906
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
907
- not available on Fargate compute environments
908
- tmpfs_tempdir : bool, default True
909
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
910
- tmpfs_size : int, optional, default None
911
- The value for the size (in MiB) of the tmpfs mount for this step.
912
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
913
- memory allocated for this step.
914
- tmpfs_path : str, optional, default None
915
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
916
- inferentia : int, default 0
917
- Number of Inferentia chips required for this step.
918
- trainium : int, default None
919
- Alias for inferentia. Use only one of the two.
920
- efa : int, default 0
921
- Number of elastic fabric adapter network devices to attach to container
922
- ephemeral_storage : int, default None
923
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
924
- This is only relevant for Fargate compute environments
925
- log_driver: str, optional, default None
926
- The log driver to use for the Amazon ECS container.
927
- log_options: List[str], optional, default None
928
- List of strings containing options for the chosen log driver. The configurable values
929
- depend on the `log driver` chosen. Validation of these options is not supported yet.
930
- Example: [`awslogs-group:aws/batch/job`]
805
+ vars : Dict[str, str], default {}
806
+ Dictionary of environment variables to set.
931
807
  """
932
808
  ...
933
809
 
934
810
  @typing.overload
935
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
811
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
936
812
  """
937
- Specifies the number of times the task corresponding
938
- to a step needs to be retried.
939
-
940
- This decorator is useful for handling transient errors, such as networking issues.
941
- If your task contains operations that can't be retried safely, e.g. database updates,
942
- it is advisable to annotate it with `@retry(times=0)`.
943
-
944
- This can be used in conjunction with the `@catch` decorator. The `@catch`
945
- decorator will execute a no-op task after all retries have been exhausted,
946
- ensuring that the flow execution can continue.
813
+ Specifies secrets to be retrieved and injected as environment variables prior to
814
+ the execution of a step.
947
815
 
948
816
  Parameters
949
817
  ----------
950
- times : int, default 3
951
- Number of times to retry this task.
952
- minutes_between_retries : int, default 2
953
- Number of minutes between retries.
818
+ sources : List[Union[str, Dict[str, Any]]], default: []
819
+ List of secret specs, defining how the secrets are to be retrieved
954
820
  """
955
821
  ...
956
822
 
957
823
  @typing.overload
958
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
824
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
959
825
  ...
960
826
 
961
827
  @typing.overload
962
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
828
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
963
829
  ...
964
830
 
965
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
831
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
966
832
  """
967
- Specifies the number of times the task corresponding
968
- to a step needs to be retried.
969
-
970
- This decorator is useful for handling transient errors, such as networking issues.
971
- If your task contains operations that can't be retried safely, e.g. database updates,
972
- it is advisable to annotate it with `@retry(times=0)`.
973
-
974
- This can be used in conjunction with the `@catch` decorator. The `@catch`
975
- decorator will execute a no-op task after all retries have been exhausted,
976
- ensuring that the flow execution can continue.
833
+ Specifies secrets to be retrieved and injected as environment variables prior to
834
+ the execution of a step.
977
835
 
978
836
  Parameters
979
837
  ----------
980
- times : int, default 3
981
- Number of times to retry this task.
982
- minutes_between_retries : int, default 2
983
- Number of minutes between retries.
984
- """
985
- ...
986
-
987
- @typing.overload
988
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
989
- """
990
- Specifies the PyPI packages for the step.
991
-
992
- Information in this decorator will augment any
993
- attributes set in the `@pyi_base` flow-level decorator. Hence,
994
- you can use `@pypi_base` to set packages required by all
995
- steps and use `@pypi` to specify step-specific overrides.
996
-
997
- Parameters
998
- ----------
999
- packages : Dict[str, str], default: {}
1000
- Packages to use for this step. The key is the name of the package
1001
- and the value is the version to use.
1002
- python : str, optional, default: None
1003
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1004
- that the version used will correspond to the version of the Python interpreter used to start the run.
1005
- """
1006
- ...
1007
-
1008
- @typing.overload
1009
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1010
- ...
1011
-
1012
- @typing.overload
1013
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1014
- ...
1015
-
1016
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1017
- """
1018
- Specifies the PyPI packages for the step.
1019
-
1020
- Information in this decorator will augment any
1021
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1022
- you can use `@pypi_base` to set packages required by all
1023
- steps and use `@pypi` to specify step-specific overrides.
1024
-
1025
- Parameters
1026
- ----------
1027
- packages : Dict[str, str], default: {}
1028
- Packages to use for this step. The key is the name of the package
1029
- and the value is the version to use.
1030
- python : str, optional, default: None
1031
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1032
- that the version used will correspond to the version of the Python interpreter used to start the run.
838
+ sources : List[Union[str, Dict[str, Any]]], default: []
839
+ List of secret specs, defining how the secrets are to be retrieved
1033
840
  """
1034
841
  ...
1035
842
 
@@ -1110,94 +917,6 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1110
917
  """
1111
918
  ...
1112
919
 
1113
- @typing.overload
1114
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1115
- """
1116
- Specifies environment variables to be set prior to the execution of a step.
1117
-
1118
- Parameters
1119
- ----------
1120
- vars : Dict[str, str], default {}
1121
- Dictionary of environment variables to set.
1122
- """
1123
- ...
1124
-
1125
- @typing.overload
1126
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1127
- ...
1128
-
1129
- @typing.overload
1130
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1131
- ...
1132
-
1133
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1134
- """
1135
- Specifies environment variables to be set prior to the execution of a step.
1136
-
1137
- Parameters
1138
- ----------
1139
- vars : Dict[str, str], default {}
1140
- Dictionary of environment variables to set.
1141
- """
1142
- ...
1143
-
1144
- @typing.overload
1145
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1146
- """
1147
- Specifies a timeout for your step.
1148
-
1149
- This decorator is useful if this step may hang indefinitely.
1150
-
1151
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1152
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1153
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1154
-
1155
- Note that all the values specified in parameters are added together so if you specify
1156
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1157
-
1158
- Parameters
1159
- ----------
1160
- seconds : int, default 0
1161
- Number of seconds to wait prior to timing out.
1162
- minutes : int, default 0
1163
- Number of minutes to wait prior to timing out.
1164
- hours : int, default 0
1165
- Number of hours to wait prior to timing out.
1166
- """
1167
- ...
1168
-
1169
- @typing.overload
1170
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1171
- ...
1172
-
1173
- @typing.overload
1174
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1175
- ...
1176
-
1177
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1178
- """
1179
- Specifies a timeout for your step.
1180
-
1181
- This decorator is useful if this step may hang indefinitely.
1182
-
1183
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1184
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1185
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1186
-
1187
- Note that all the values specified in parameters are added together so if you specify
1188
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1189
-
1190
- Parameters
1191
- ----------
1192
- seconds : int, default 0
1193
- Number of seconds to wait prior to timing out.
1194
- minutes : int, default 0
1195
- Number of minutes to wait prior to timing out.
1196
- hours : int, default 0
1197
- Number of hours to wait prior to timing out.
1198
- """
1199
- ...
1200
-
1201
920
  @typing.overload
1202
921
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1203
922
  """
@@ -1250,158 +969,384 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1250
969
  ...
1251
970
 
1252
971
  @typing.overload
1253
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
972
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1254
973
  """
1255
- Specifies secrets to be retrieved and injected as environment variables prior to
1256
- the execution of a step.
974
+ Specifies the PyPI packages for the step.
975
+
976
+ Information in this decorator will augment any
977
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
978
+ you can use `@pypi_base` to set packages required by all
979
+ steps and use `@pypi` to specify step-specific overrides.
1257
980
 
1258
981
  Parameters
1259
982
  ----------
1260
- sources : List[Union[str, Dict[str, Any]]], default: []
1261
- List of secret specs, defining how the secrets are to be retrieved
983
+ packages : Dict[str, str], default: {}
984
+ Packages to use for this step. The key is the name of the package
985
+ and the value is the version to use.
986
+ python : str, optional, default: None
987
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
988
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1262
989
  """
1263
990
  ...
1264
991
 
1265
992
  @typing.overload
1266
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
993
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1267
994
  ...
1268
995
 
1269
996
  @typing.overload
1270
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
997
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
998
  ...
1272
999
 
1273
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1000
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1274
1001
  """
1275
- Specifies secrets to be retrieved and injected as environment variables prior to
1276
- the execution of a step.
1002
+ Specifies the PyPI packages for the step.
1003
+
1004
+ Information in this decorator will augment any
1005
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1006
+ you can use `@pypi_base` to set packages required by all
1007
+ steps and use `@pypi` to specify step-specific overrides.
1277
1008
 
1278
1009
  Parameters
1279
1010
  ----------
1280
- sources : List[Union[str, Dict[str, Any]]], default: []
1281
- List of secret specs, defining how the secrets are to be retrieved
1011
+ packages : Dict[str, str], default: {}
1012
+ Packages to use for this step. The key is the name of the package
1013
+ and the value is the version to use.
1014
+ python : str, optional, default: None
1015
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1016
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1282
1017
  """
1283
1018
  ...
1284
1019
 
1285
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1020
+ @typing.overload
1021
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1286
1022
  """
1287
- Specifies that this step should execute on Kubernetes.
1023
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1288
1024
 
1289
1025
  Parameters
1290
1026
  ----------
1291
1027
  cpu : int, default 1
1292
1028
  Number of CPUs required for this step. If `@resources` is
1293
1029
  also present, the maximum value from all decorators is used.
1030
+ gpu : int, default 0
1031
+ Number of GPUs required for this step. If `@resources` is
1032
+ also present, the maximum value from all decorators is used.
1294
1033
  memory : int, default 4096
1295
1034
  Memory size (in MB) required for this step. If
1296
1035
  `@resources` is also present, the maximum value from all decorators is
1297
1036
  used.
1298
- disk : int, default 10240
1299
- Disk size (in MB) required for this step. If
1300
- `@resources` is also present, the maximum value from all decorators is
1301
- used.
1302
1037
  image : str, optional, default None
1303
- Docker image to use when launching on Kubernetes. If not specified, and
1304
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1038
+ Docker image to use when launching on AWS Batch. If not specified, and
1039
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1305
1040
  not, a default Docker image mapping to the current version of Python is used.
1306
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1307
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1308
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1309
- Kubernetes service account to use when launching pod in Kubernetes.
1310
- secrets : List[str], optional, default None
1311
- Kubernetes secrets to use when launching pod in Kubernetes. These
1312
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1313
- in Metaflow configuration.
1314
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1315
- Kubernetes namespace to use when launching pod in Kubernetes.
1316
- gpu : int, optional, default None
1317
- Number of GPUs required for this step. A value of zero implies that
1318
- the scheduled node should not have GPUs.
1319
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1320
- The vendor of the GPUs to be used for this step.
1321
- tolerations : List[str], default []
1322
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1323
- Kubernetes tolerations to use when launching pod in Kubernetes.
1041
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1042
+ AWS Batch Job Queue to submit the job to.
1043
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1044
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1045
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1046
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1047
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1048
+ shared_memory : int, optional, default None
1049
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1050
+ This parameter maps to the `--shm-size` option in Docker.
1051
+ max_swap : int, optional, default None
1052
+ The total amount of swap memory (in MiB) a container can use for this
1053
+ step. This parameter is translated to the `--memory-swap` option in
1054
+ Docker where the value is the sum of the container memory plus the
1055
+ `max_swap` value.
1056
+ swappiness : int, optional, default None
1057
+ This allows you to tune memory swappiness behavior for this step.
1058
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1059
+ necessary. A swappiness value of 100 causes pages to be swapped very
1060
+ aggressively. Accepted values are whole numbers between 0 and 100.
1324
1061
  use_tmpfs : bool, default False
1325
- This enables an explicit tmpfs mount for this step.
1062
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1063
+ not available on Fargate compute environments
1326
1064
  tmpfs_tempdir : bool, default True
1327
1065
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1328
- tmpfs_size : int, optional, default: None
1066
+ tmpfs_size : int, optional, default None
1329
1067
  The value for the size (in MiB) of the tmpfs mount for this step.
1330
1068
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1331
1069
  memory allocated for this step.
1332
- tmpfs_path : str, optional, default /metaflow_temp
1333
- Path to tmpfs mount for this step.
1334
- persistent_volume_claims : Dict[str, str], optional, default None
1335
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1336
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1337
- shared_memory: int, optional
1338
- Shared memory size (in MiB) required for this step
1339
- port: int, optional
1340
- Port number to specify in the Kubernetes job object
1341
- """
1342
- ...
1343
-
1344
- @typing.overload
1345
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1346
- """
1347
- Specifies that the step will success under all circumstances.
1348
-
1349
- The decorator will create an optional artifact, specified by `var`, which
1350
- contains the exception raised. You can use it to detect the presence
1351
- of errors, indicating that all happy-path artifacts produced by the step
1352
- are missing.
1070
+ tmpfs_path : str, optional, default None
1071
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1072
+ inferentia : int, default 0
1073
+ Number of Inferentia chips required for this step.
1074
+ trainium : int, default None
1075
+ Alias for inferentia. Use only one of the two.
1076
+ efa : int, default 0
1077
+ Number of elastic fabric adapter network devices to attach to container
1078
+ ephemeral_storage : int, default None
1079
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1080
+ This is only relevant for Fargate compute environments
1081
+ log_driver: str, optional, default None
1082
+ The log driver to use for the Amazon ECS container.
1083
+ log_options: List[str], optional, default None
1084
+ List of strings containing options for the chosen log driver. The configurable values
1085
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1086
+ Example: [`awslogs-group:aws/batch/job`]
1087
+ """
1088
+ ...
1089
+
1090
+ @typing.overload
1091
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1092
+ ...
1093
+
1094
+ @typing.overload
1095
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1096
+ ...
1097
+
1098
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1099
+ """
1100
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1353
1101
 
1354
1102
  Parameters
1355
1103
  ----------
1356
- var : str, optional, default None
1357
- Name of the artifact in which to store the caught exception.
1358
- If not specified, the exception is not stored.
1359
- print_exception : bool, default True
1360
- Determines whether or not the exception is printed to
1361
- stdout when caught.
1104
+ cpu : int, default 1
1105
+ Number of CPUs required for this step. If `@resources` is
1106
+ also present, the maximum value from all decorators is used.
1107
+ gpu : int, default 0
1108
+ Number of GPUs required for this step. If `@resources` is
1109
+ also present, the maximum value from all decorators is used.
1110
+ memory : int, default 4096
1111
+ Memory size (in MB) required for this step. If
1112
+ `@resources` is also present, the maximum value from all decorators is
1113
+ used.
1114
+ image : str, optional, default None
1115
+ Docker image to use when launching on AWS Batch. If not specified, and
1116
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1117
+ not, a default Docker image mapping to the current version of Python is used.
1118
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1119
+ AWS Batch Job Queue to submit the job to.
1120
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1121
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1122
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1123
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1124
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1125
+ shared_memory : int, optional, default None
1126
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1127
+ This parameter maps to the `--shm-size` option in Docker.
1128
+ max_swap : int, optional, default None
1129
+ The total amount of swap memory (in MiB) a container can use for this
1130
+ step. This parameter is translated to the `--memory-swap` option in
1131
+ Docker where the value is the sum of the container memory plus the
1132
+ `max_swap` value.
1133
+ swappiness : int, optional, default None
1134
+ This allows you to tune memory swappiness behavior for this step.
1135
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1136
+ necessary. A swappiness value of 100 causes pages to be swapped very
1137
+ aggressively. Accepted values are whole numbers between 0 and 100.
1138
+ use_tmpfs : bool, default False
1139
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1140
+ not available on Fargate compute environments
1141
+ tmpfs_tempdir : bool, default True
1142
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1143
+ tmpfs_size : int, optional, default None
1144
+ The value for the size (in MiB) of the tmpfs mount for this step.
1145
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1146
+ memory allocated for this step.
1147
+ tmpfs_path : str, optional, default None
1148
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1149
+ inferentia : int, default 0
1150
+ Number of Inferentia chips required for this step.
1151
+ trainium : int, default None
1152
+ Alias for inferentia. Use only one of the two.
1153
+ efa : int, default 0
1154
+ Number of elastic fabric adapter network devices to attach to container
1155
+ ephemeral_storage : int, default None
1156
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1157
+ This is only relevant for Fargate compute environments
1158
+ log_driver: str, optional, default None
1159
+ The log driver to use for the Amazon ECS container.
1160
+ log_options: List[str], optional, default None
1161
+ List of strings containing options for the chosen log driver. The configurable values
1162
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1163
+ Example: [`awslogs-group:aws/batch/job`]
1164
+ """
1165
+ ...
1166
+
1167
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1168
+ """
1169
+ Specifies that this step should execute on Kubernetes.
1170
+
1171
+ Parameters
1172
+ ----------
1173
+ cpu : int, default 1
1174
+ Number of CPUs required for this step. If `@resources` is
1175
+ also present, the maximum value from all decorators is used.
1176
+ memory : int, default 4096
1177
+ Memory size (in MB) required for this step. If
1178
+ `@resources` is also present, the maximum value from all decorators is
1179
+ used.
1180
+ disk : int, default 10240
1181
+ Disk size (in MB) required for this step. If
1182
+ `@resources` is also present, the maximum value from all decorators is
1183
+ used.
1184
+ image : str, optional, default None
1185
+ Docker image to use when launching on Kubernetes. If not specified, and
1186
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1187
+ not, a default Docker image mapping to the current version of Python is used.
1188
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1189
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1190
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1191
+ Kubernetes service account to use when launching pod in Kubernetes.
1192
+ secrets : List[str], optional, default None
1193
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1194
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1195
+ in Metaflow configuration.
1196
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1197
+ Kubernetes namespace to use when launching pod in Kubernetes.
1198
+ gpu : int, optional, default None
1199
+ Number of GPUs required for this step. A value of zero implies that
1200
+ the scheduled node should not have GPUs.
1201
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1202
+ The vendor of the GPUs to be used for this step.
1203
+ tolerations : List[str], default []
1204
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1205
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1206
+ use_tmpfs : bool, default False
1207
+ This enables an explicit tmpfs mount for this step.
1208
+ tmpfs_tempdir : bool, default True
1209
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1210
+ tmpfs_size : int, optional, default: None
1211
+ The value for the size (in MiB) of the tmpfs mount for this step.
1212
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1213
+ memory allocated for this step.
1214
+ tmpfs_path : str, optional, default /metaflow_temp
1215
+ Path to tmpfs mount for this step.
1216
+ persistent_volume_claims : Dict[str, str], optional, default None
1217
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1218
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1219
+ shared_memory: int, optional
1220
+ Shared memory size (in MiB) required for this step
1221
+ port: int, optional
1222
+ Port number to specify in the Kubernetes job object
1362
1223
  """
1363
1224
  ...
1364
1225
 
1365
1226
  @typing.overload
1366
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1227
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1228
+ """
1229
+ Specifies a timeout for your step.
1230
+
1231
+ This decorator is useful if this step may hang indefinitely.
1232
+
1233
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1234
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1235
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1236
+
1237
+ Note that all the values specified in parameters are added together so if you specify
1238
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1239
+
1240
+ Parameters
1241
+ ----------
1242
+ seconds : int, default 0
1243
+ Number of seconds to wait prior to timing out.
1244
+ minutes : int, default 0
1245
+ Number of minutes to wait prior to timing out.
1246
+ hours : int, default 0
1247
+ Number of hours to wait prior to timing out.
1248
+ """
1367
1249
  ...
1368
1250
 
1369
1251
  @typing.overload
1370
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1252
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1371
1253
  ...
1372
1254
 
1373
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1255
+ @typing.overload
1256
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1257
+ ...
1258
+
1259
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1374
1260
  """
1375
- Specifies that the step will success under all circumstances.
1261
+ Specifies a timeout for your step.
1376
1262
 
1377
- The decorator will create an optional artifact, specified by `var`, which
1378
- contains the exception raised. You can use it to detect the presence
1379
- of errors, indicating that all happy-path artifacts produced by the step
1380
- are missing.
1263
+ This decorator is useful if this step may hang indefinitely.
1264
+
1265
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1266
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1267
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1268
+
1269
+ Note that all the values specified in parameters are added together so if you specify
1270
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1381
1271
 
1382
1272
  Parameters
1383
1273
  ----------
1384
- var : str, optional, default None
1385
- Name of the artifact in which to store the caught exception.
1386
- If not specified, the exception is not stored.
1387
- print_exception : bool, default True
1388
- Determines whether or not the exception is printed to
1389
- stdout when caught.
1274
+ seconds : int, default 0
1275
+ Number of seconds to wait prior to timing out.
1276
+ minutes : int, default 0
1277
+ Number of minutes to wait prior to timing out.
1278
+ hours : int, default 0
1279
+ Number of hours to wait prior to timing out.
1390
1280
  """
1391
1281
  ...
1392
1282
 
1393
1283
  @typing.overload
1394
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1284
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1395
1285
  """
1396
- Specifies the Conda environment for all steps of the flow.
1286
+ Specifies the number of times the task corresponding
1287
+ to a step needs to be retried.
1397
1288
 
1398
- Use `@conda_base` to set common libraries required by all
1399
- steps and use `@conda` to specify step-specific additions.
1289
+ This decorator is useful for handling transient errors, such as networking issues.
1290
+ If your task contains operations that can't be retried safely, e.g. database updates,
1291
+ it is advisable to annotate it with `@retry(times=0)`.
1292
+
1293
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1294
+ decorator will execute a no-op task after all retries have been exhausted,
1295
+ ensuring that the flow execution can continue.
1296
+
1297
+ Parameters
1298
+ ----------
1299
+ times : int, default 3
1300
+ Number of times to retry this task.
1301
+ minutes_between_retries : int, default 2
1302
+ Number of minutes between retries.
1303
+ """
1304
+ ...
1305
+
1306
+ @typing.overload
1307
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1308
+ ...
1309
+
1310
+ @typing.overload
1311
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1312
+ ...
1313
+
1314
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1315
+ """
1316
+ Specifies the number of times the task corresponding
1317
+ to a step needs to be retried.
1318
+
1319
+ This decorator is useful for handling transient errors, such as networking issues.
1320
+ If your task contains operations that can't be retried safely, e.g. database updates,
1321
+ it is advisable to annotate it with `@retry(times=0)`.
1322
+
1323
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1324
+ decorator will execute a no-op task after all retries have been exhausted,
1325
+ ensuring that the flow execution can continue.
1326
+
1327
+ Parameters
1328
+ ----------
1329
+ times : int, default 3
1330
+ Number of times to retry this task.
1331
+ minutes_between_retries : int, default 2
1332
+ Number of minutes between retries.
1333
+ """
1334
+ ...
1335
+
1336
+ @typing.overload
1337
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1338
+ """
1339
+ Specifies the Conda environment for the step.
1340
+
1341
+ Information in this decorator will augment any
1342
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1343
+ you can use `@conda_base` to set packages required by all
1344
+ steps and use `@conda` to specify step-specific overrides.
1400
1345
 
1401
1346
  Parameters
1402
1347
  ----------
1403
1348
  packages : Dict[str, str], default {}
1404
- Packages to use for this flow. The key is the name of the package
1349
+ Packages to use for this step. The key is the name of the package
1405
1350
  and the value is the version to use.
1406
1351
  libraries : Dict[str, str], default {}
1407
1352
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1409,25 +1354,31 @@ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[s
1409
1354
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1410
1355
  that the version used will correspond to the version of the Python interpreter used to start the run.
1411
1356
  disabled : bool, default False
1412
- If set to True, disables Conda.
1357
+ If set to True, disables @conda.
1413
1358
  """
1414
1359
  ...
1415
1360
 
1416
1361
  @typing.overload
1417
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1362
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1418
1363
  ...
1419
1364
 
1420
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1365
+ @typing.overload
1366
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1367
+ ...
1368
+
1369
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1421
1370
  """
1422
- Specifies the Conda environment for all steps of the flow.
1371
+ Specifies the Conda environment for the step.
1423
1372
 
1424
- Use `@conda_base` to set common libraries required by all
1425
- steps and use `@conda` to specify step-specific additions.
1373
+ Information in this decorator will augment any
1374
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1375
+ you can use `@conda_base` to set packages required by all
1376
+ steps and use `@conda` to specify step-specific overrides.
1426
1377
 
1427
1378
  Parameters
1428
1379
  ----------
1429
1380
  packages : Dict[str, str], default {}
1430
- Packages to use for this flow. The key is the name of the package
1381
+ Packages to use for this step. The key is the name of the package
1431
1382
  and the value is the version to use.
1432
1383
  libraries : Dict[str, str], default {}
1433
1384
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1435,49 +1386,7 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1435
1386
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1436
1387
  that the version used will correspond to the version of the Python interpreter used to start the run.
1437
1388
  disabled : bool, default False
1438
- If set to True, disables Conda.
1439
- """
1440
- ...
1441
-
1442
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1443
- """
1444
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1445
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1446
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1447
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1448
- starts only after all sensors finish.
1449
-
1450
- Parameters
1451
- ----------
1452
- timeout : int
1453
- Time, in seconds before the task times out and fails. (Default: 3600)
1454
- poke_interval : int
1455
- Time in seconds that the job should wait in between each try. (Default: 60)
1456
- mode : str
1457
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1458
- exponential_backoff : bool
1459
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1460
- pool : str
1461
- the slot pool this task should run in,
1462
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1463
- soft_fail : bool
1464
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1465
- name : str
1466
- Name of the sensor on Airflow
1467
- description : str
1468
- Description of sensor in the Airflow UI
1469
- bucket_key : Union[str, List[str]]
1470
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1471
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1472
- bucket_name : str
1473
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1474
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1475
- wildcard_match : bool
1476
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1477
- aws_conn_id : str
1478
- a reference to the s3 connection on Airflow. (Default: None)
1479
- verify : bool
1480
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1389
+ If set to True, disables @conda.
1481
1390
  """
1482
1391
  ...
1483
1392
 
@@ -1523,106 +1432,45 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1523
1432
  """
1524
1433
  ...
1525
1434
 
1526
- @typing.overload
1527
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1528
- """
1529
- Specifies the flow(s) that this flow depends on.
1530
-
1531
- ```
1532
- @trigger_on_finish(flow='FooFlow')
1533
- ```
1534
- or
1535
- ```
1536
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1537
- ```
1538
- This decorator respects the @project decorator and triggers the flow
1539
- when upstream runs within the same namespace complete successfully
1540
-
1541
- Additionally, you can specify project aware upstream flow dependencies
1542
- by specifying the fully qualified project_flow_name.
1543
- ```
1544
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1545
- ```
1546
- or
1547
- ```
1548
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1549
- ```
1550
-
1551
- You can also specify just the project or project branch (other values will be
1552
- inferred from the current project or project branch):
1553
- ```
1554
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1555
- ```
1556
-
1557
- Note that `branch` is typically one of:
1558
- - `prod`
1559
- - `user.bob`
1560
- - `test.my_experiment`
1561
- - `prod.staging`
1562
-
1563
- Parameters
1564
- ----------
1565
- flow : Union[str, Dict[str, str]], optional, default None
1566
- Upstream flow dependency for this flow.
1567
- flows : List[Union[str, Dict[str, str]]], default []
1568
- Upstream flow dependencies for this flow.
1569
- options : Dict[str, Any], default {}
1570
- Backend-specific configuration for tuning eventing behavior.
1571
-
1572
-
1573
- """
1574
- ...
1575
-
1576
- @typing.overload
1577
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1578
- ...
1579
-
1580
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1435
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1581
1436
  """
1582
- Specifies the flow(s) that this flow depends on.
1583
-
1584
- ```
1585
- @trigger_on_finish(flow='FooFlow')
1586
- ```
1587
- or
1588
- ```
1589
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1590
- ```
1591
- This decorator respects the @project decorator and triggers the flow
1592
- when upstream runs within the same namespace complete successfully
1593
-
1594
- Additionally, you can specify project aware upstream flow dependencies
1595
- by specifying the fully qualified project_flow_name.
1596
- ```
1597
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1598
- ```
1599
- or
1600
- ```
1601
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1602
- ```
1603
-
1604
- You can also specify just the project or project branch (other values will be
1605
- inferred from the current project or project branch):
1606
- ```
1607
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1608
- ```
1609
-
1610
- Note that `branch` is typically one of:
1611
- - `prod`
1612
- - `user.bob`
1613
- - `test.my_experiment`
1614
- - `prod.staging`
1615
-
1616
- Parameters
1617
- ----------
1618
- flow : Union[str, Dict[str, str]], optional, default None
1619
- Upstream flow dependency for this flow.
1620
- flows : List[Union[str, Dict[str, str]]], default []
1621
- Upstream flow dependencies for this flow.
1622
- options : Dict[str, Any], default {}
1623
- Backend-specific configuration for tuning eventing behavior.
1624
-
1437
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1438
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1439
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1440
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1441
+ starts only after all sensors finish.
1625
1442
 
1443
+ Parameters
1444
+ ----------
1445
+ timeout : int
1446
+ Time, in seconds before the task times out and fails. (Default: 3600)
1447
+ poke_interval : int
1448
+ Time in seconds that the job should wait in between each try. (Default: 60)
1449
+ mode : str
1450
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1451
+ exponential_backoff : bool
1452
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1453
+ pool : str
1454
+ the slot pool this task should run in,
1455
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1456
+ soft_fail : bool
1457
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1458
+ name : str
1459
+ Name of the sensor on Airflow
1460
+ description : str
1461
+ Description of sensor in the Airflow UI
1462
+ bucket_key : Union[str, List[str]]
1463
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1464
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1465
+ bucket_name : str
1466
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1467
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1468
+ wildcard_match : bool
1469
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1470
+ aws_conn_id : str
1471
+ a reference to the s3 connection on Airflow. (Default: None)
1472
+ verify : bool
1473
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1626
1474
  """
1627
1475
  ...
1628
1476
 
@@ -1739,6 +1587,135 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1739
1587
  """
1740
1588
  ...
1741
1589
 
1590
+ @typing.overload
1591
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1592
+ """
1593
+ Specifies the times when the flow should be run when running on a
1594
+ production scheduler.
1595
+
1596
+ Parameters
1597
+ ----------
1598
+ hourly : bool, default False
1599
+ Run the workflow hourly.
1600
+ daily : bool, default True
1601
+ Run the workflow daily.
1602
+ weekly : bool, default False
1603
+ Run the workflow weekly.
1604
+ cron : str, optional, default None
1605
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1606
+ specified by this expression.
1607
+ timezone : str, optional, default None
1608
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1609
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1610
+ """
1611
+ ...
1612
+
1613
+ @typing.overload
1614
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1615
+ ...
1616
+
1617
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1618
+ """
1619
+ Specifies the times when the flow should be run when running on a
1620
+ production scheduler.
1621
+
1622
+ Parameters
1623
+ ----------
1624
+ hourly : bool, default False
1625
+ Run the workflow hourly.
1626
+ daily : bool, default True
1627
+ Run the workflow daily.
1628
+ weekly : bool, default False
1629
+ Run the workflow weekly.
1630
+ cron : str, optional, default None
1631
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1632
+ specified by this expression.
1633
+ timezone : str, optional, default None
1634
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1635
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1636
+ """
1637
+ ...
1638
+
1639
+ @typing.overload
1640
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1641
+ """
1642
+ Specifies the Conda environment for all steps of the flow.
1643
+
1644
+ Use `@conda_base` to set common libraries required by all
1645
+ steps and use `@conda` to specify step-specific additions.
1646
+
1647
+ Parameters
1648
+ ----------
1649
+ packages : Dict[str, str], default {}
1650
+ Packages to use for this flow. The key is the name of the package
1651
+ and the value is the version to use.
1652
+ libraries : Dict[str, str], default {}
1653
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1654
+ python : str, optional, default None
1655
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1656
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1657
+ disabled : bool, default False
1658
+ If set to True, disables Conda.
1659
+ """
1660
+ ...
1661
+
1662
+ @typing.overload
1663
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1664
+ ...
1665
+
1666
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1667
+ """
1668
+ Specifies the Conda environment for all steps of the flow.
1669
+
1670
+ Use `@conda_base` to set common libraries required by all
1671
+ steps and use `@conda` to specify step-specific additions.
1672
+
1673
+ Parameters
1674
+ ----------
1675
+ packages : Dict[str, str], default {}
1676
+ Packages to use for this flow. The key is the name of the package
1677
+ and the value is the version to use.
1678
+ libraries : Dict[str, str], default {}
1679
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1680
+ python : str, optional, default None
1681
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1682
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1683
+ disabled : bool, default False
1684
+ If set to True, disables Conda.
1685
+ """
1686
+ ...
1687
+
1688
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1689
+ """
1690
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1691
+
1692
+ User code call
1693
+ -----------
1694
+ @nim(
1695
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1696
+ backend='managed'
1697
+ )
1698
+
1699
+ Valid backend options
1700
+ ---------------------
1701
+ - 'managed': Outerbounds selects a compute provider based on the model.
1702
+ - 🚧 'dataplane': Run in your account.
1703
+
1704
+ Valid model options
1705
+ ----------------
1706
+ - 'meta/llama3-8b-instruct': 8B parameter model
1707
+ - 'meta/llama3-70b-instruct': 70B parameter model
1708
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1709
+
1710
+ Parameters
1711
+ ----------
1712
+ models: list[NIM]
1713
+ List of NIM containers running models in sidecars.
1714
+ backend: str
1715
+ Compute provider to run the NIM container.
1716
+ """
1717
+ ...
1718
+
1742
1719
  @typing.overload
1743
1720
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1744
1721
  """
@@ -1779,51 +1756,105 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1779
1756
  ...
1780
1757
 
1781
1758
  @typing.overload
1782
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1759
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1783
1760
  """
1784
- Specifies the times when the flow should be run when running on a
1785
- production scheduler.
1761
+ Specifies the flow(s) that this flow depends on.
1762
+
1763
+ ```
1764
+ @trigger_on_finish(flow='FooFlow')
1765
+ ```
1766
+ or
1767
+ ```
1768
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1769
+ ```
1770
+ This decorator respects the @project decorator and triggers the flow
1771
+ when upstream runs within the same namespace complete successfully
1772
+
1773
+ Additionally, you can specify project aware upstream flow dependencies
1774
+ by specifying the fully qualified project_flow_name.
1775
+ ```
1776
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1777
+ ```
1778
+ or
1779
+ ```
1780
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1781
+ ```
1782
+
1783
+ You can also specify just the project or project branch (other values will be
1784
+ inferred from the current project or project branch):
1785
+ ```
1786
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1787
+ ```
1788
+
1789
+ Note that `branch` is typically one of:
1790
+ - `prod`
1791
+ - `user.bob`
1792
+ - `test.my_experiment`
1793
+ - `prod.staging`
1786
1794
 
1787
1795
  Parameters
1788
1796
  ----------
1789
- hourly : bool, default False
1790
- Run the workflow hourly.
1791
- daily : bool, default True
1792
- Run the workflow daily.
1793
- weekly : bool, default False
1794
- Run the workflow weekly.
1795
- cron : str, optional, default None
1796
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1797
- specified by this expression.
1798
- timezone : str, optional, default None
1799
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1800
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1797
+ flow : Union[str, Dict[str, str]], optional, default None
1798
+ Upstream flow dependency for this flow.
1799
+ flows : List[Union[str, Dict[str, str]]], default []
1800
+ Upstream flow dependencies for this flow.
1801
+ options : Dict[str, Any], default {}
1802
+ Backend-specific configuration for tuning eventing behavior.
1803
+
1804
+
1801
1805
  """
1802
1806
  ...
1803
1807
 
1804
1808
  @typing.overload
1805
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1809
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1806
1810
  ...
1807
1811
 
1808
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1812
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1809
1813
  """
1810
- Specifies the times when the flow should be run when running on a
1811
- production scheduler.
1814
+ Specifies the flow(s) that this flow depends on.
1815
+
1816
+ ```
1817
+ @trigger_on_finish(flow='FooFlow')
1818
+ ```
1819
+ or
1820
+ ```
1821
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1822
+ ```
1823
+ This decorator respects the @project decorator and triggers the flow
1824
+ when upstream runs within the same namespace complete successfully
1825
+
1826
+ Additionally, you can specify project aware upstream flow dependencies
1827
+ by specifying the fully qualified project_flow_name.
1828
+ ```
1829
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1830
+ ```
1831
+ or
1832
+ ```
1833
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1834
+ ```
1835
+
1836
+ You can also specify just the project or project branch (other values will be
1837
+ inferred from the current project or project branch):
1838
+ ```
1839
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1840
+ ```
1841
+
1842
+ Note that `branch` is typically one of:
1843
+ - `prod`
1844
+ - `user.bob`
1845
+ - `test.my_experiment`
1846
+ - `prod.staging`
1812
1847
 
1813
1848
  Parameters
1814
1849
  ----------
1815
- hourly : bool, default False
1816
- Run the workflow hourly.
1817
- daily : bool, default True
1818
- Run the workflow daily.
1819
- weekly : bool, default False
1820
- Run the workflow weekly.
1821
- cron : str, optional, default None
1822
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1823
- specified by this expression.
1824
- timezone : str, optional, default None
1825
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1826
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1850
+ flow : Union[str, Dict[str, str]], optional, default None
1851
+ Upstream flow dependency for this flow.
1852
+ flows : List[Union[str, Dict[str, str]]], default []
1853
+ Upstream flow dependencies for this flow.
1854
+ options : Dict[str, Any], default {}
1855
+ Backend-specific configuration for tuning eventing behavior.
1856
+
1857
+
1827
1858
  """
1828
1859
  ...
1829
1860