metaflow-stubs 2.11.11__py2.py3-none-any.whl → 2.11.13__py2.py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +489 -489
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +8 -2
  16. metaflow-stubs/metaflow_current.pyi +17 -17
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +4 -4
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  83. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +4 -4
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +10 -4
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.13.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.13.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.11.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.13.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.13.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.11 #
- # Generated on 2024-05-02T22:04:13.800222 #
+ # MF version: 2.11.13 #
+ # Generated on 2024-05-06T22:21:21.471072 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.client.core
+ import metaflow.plugins.datatools.s3.s3
  import datetime
- import metaflow._vendor.click.types
+ import io
  import typing
- import metaflow.plugins.datatools.s3.s3
- import metaflow.datastore.inputs
- import metaflow.metaflow_current
+ import metaflow.client.core
  import metaflow.parameters
- import io
+ import metaflow._vendor.click.types
  import metaflow.events
+ import metaflow.metaflow_current
+ import metaflow.datastore.inputs
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -726,198 +726,151 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

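As a usage sketch of the `@conda` step decorator documented above (the flow name and version pins are illustrative, not taken from this package):

    from metaflow import FlowSpec, step, conda

    class CondaExampleFlow(FlowSpec):

        # Step-level @conda augments any flow-level @conda_base settings.
        @conda(packages={"pandas": "2.0.3"}, python="3.10.4")
        @step
        def start(self):
            import pandas as pd  # imported inside the step so it resolves in the Conda environment
            self.frame = pd.DataFrame({"x": [1, 2, 3]})
            self.next(self.end)

        @step
        def end(self):
            print(self.frame.shape)

    if __name__ == "__main__":
        CondaExampleFlow()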
  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
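A minimal sketch of the `@secrets` decorator shown above; the secret name and the injected variable are hypothetical and depend on the configured secrets provider:

    import os

    from metaflow import FlowSpec, step, secrets

    class SecretsExampleFlow(FlowSpec):

        # "db-credentials" is an illustrative secret spec understood by the
        # configured provider (for example, AWS Secrets Manager).
        @secrets(sources=["db-credentials"])
        @step
        def start(self):
            # Each key/value pair in the resolved secret is injected as an
            # environment variable before the step body runs.
            print("DB_USER present:", "DB_USER" in os.environ)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SecretsExampleFlow()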
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
  image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
  use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
+ This enables an explicit tmpfs mount for this step.
  tmpfs_tempdir : bool, default True
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
+ tmpfs_size : int, optional, default: None
  The value for the size (in MiB) of the tmpfs mount for this step.
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
  memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...

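A sketch of the `@kubernetes` signature introduced above, assuming a cluster already configured for Metaflow; the resource sizes and claim name are illustrative:

    from metaflow import FlowSpec, step, kubernetes

    class K8sExampleFlow(FlowSpec):

        # memory and disk are in MB per the docstring; "models-pvc" is a
        # hypothetical persistent volume claim mounted at /mnt/models.
        @kubernetes(cpu=2, memory=8192, disk=20480,
                    persistent_volume_claims={"models-pvc": "/mnt/models"})
        @step
        def start(self):
            with open("/mnt/models/marker.txt", "w") as f:
                f.write("ran on Kubernetes")
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        K8sExampleFlow()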
@@ -1050,112 +1003,51 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the PyPI packages for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies a timeout for your step.
+ Specifies the PyPI packages for the step.

- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

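A sketch of the step-level `@pypi` decorator together with the flow-level `@pypi_base` it augments (package pins are illustrative):

    from metaflow import FlowSpec, step, pypi, pypi_base

    @pypi_base(packages={"requests": "2.31.0"}, python="3.10.4")
    class PypiExampleFlow(FlowSpec):

        # The step-level decorator adds pandas on top of the base packages.
        @pypi(packages={"pandas": "2.0.3"})
        @step
        def start(self):
            import pandas as pd
            import requests
            self.status = requests.get("https://example.com").status_code
            self.next(self.end)

        @step
        def end(self):
            print(self.status)

    if __name__ == "__main__":
        PypiExampleFlow()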
@@ -1209,92 +1101,55 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

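A sketch of `@retry` as documented above, combined with `@catch` so the flow continues once retries are exhausted (the flaky call is illustrative):

    import urllib.request

    from metaflow import FlowSpec, catch, retry, step

    class RetryExampleFlow(FlowSpec):

        # Retry transient failures up to 4 times, one minute apart; if every
        # attempt fails, @catch stores the exception in self.fetch_error and
        # lets the flow proceed.
        @catch(var="fetch_error")
        @retry(times=4, minutes_between_retries=1)
        @step
        def start(self):
            self.payload = urllib.request.urlopen("https://example.com").read()
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        RetryExampleFlow()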
@@ -1329,210 +1184,207 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
1329
1184
  """
1330
1185
  ...
1331
1186
 
1332
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1187
+ @typing.overload
1188
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1333
1189
  """
1334
- Specifies that this step should execute on Kubernetes.
1190
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1335
1191
 
1336
1192
  Parameters
1337
1193
  ----------
1338
1194
  cpu : int, default 1
1339
1195
  Number of CPUs required for this step. If `@resources` is
1340
1196
  also present, the maximum value from all decorators is used.
1197
+ gpu : int, default 0
1198
+ Number of GPUs required for this step. If `@resources` is
1199
+ also present, the maximum value from all decorators is used.
1341
1200
  memory : int, default 4096
1342
1201
  Memory size (in MB) required for this step. If
1343
1202
  `@resources` is also present, the maximum value from all decorators is
1344
1203
  used.
1345
- disk : int, default 10240
1346
- Disk size (in MB) required for this step. If
1347
- `@resources` is also present, the maximum value from all decorators is
1348
- used.
1349
1204
  image : str, optional, default None
1350
- Docker image to use when launching on Kubernetes. If not specified, and
1351
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1205
+ Docker image to use when launching on AWS Batch. If not specified, and
1206
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1352
1207
  not, a default Docker image mapping to the current version of Python is used.
1353
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1354
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1355
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1356
- Kubernetes service account to use when launching pod in Kubernetes.
1357
- secrets : List[str], optional, default None
1358
- Kubernetes secrets to use when launching pod in Kubernetes. These
1359
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1360
- in Metaflow configuration.
1361
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1362
- Kubernetes namespace to use when launching pod in Kubernetes.
1363
- gpu : int, optional, default None
1364
- Number of GPUs required for this step. A value of zero implies that
1365
- the scheduled node should not have GPUs.
1366
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1367
- The vendor of the GPUs to be used for this step.
1368
- tolerations : List[str], default []
1369
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1370
- Kubernetes tolerations to use when launching pod in Kubernetes.
1208
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1209
+ AWS Batch Job Queue to submit the job to.
1210
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1211
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1212
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1213
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1214
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1215
+ shared_memory : int, optional, default None
1216
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1217
+ This parameter maps to the `--shm-size` option in Docker.
1218
+ max_swap : int, optional, default None
1219
+ The total amount of swap memory (in MiB) a container can use for this
1220
+ step. This parameter is translated to the `--memory-swap` option in
1221
+ Docker where the value is the sum of the container memory plus the
1222
+ `max_swap` value.
1223
+ swappiness : int, optional, default None
1224
+ This allows you to tune memory swappiness behavior for this step.
1225
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1226
+ necessary. A swappiness value of 100 causes pages to be swapped very
1227
+ aggressively. Accepted values are whole numbers between 0 and 100.
1371
1228
  use_tmpfs : bool, default False
1372
- This enables an explicit tmpfs mount for this step.
1229
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1230
+ not available on Fargate compute environments
1373
1231
  tmpfs_tempdir : bool, default True
1374
1232
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1375
- tmpfs_size : int, optional, default: None
1233
+ tmpfs_size : int, optional, default None
1376
1234
  The value for the size (in MiB) of the tmpfs mount for this step.
1377
1235
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1378
1236
  memory allocated for this step.
1379
- tmpfs_path : str, optional, default /metaflow_temp
1380
- Path to tmpfs mount for this step.
1381
- persistent_volume_claims : Dict[str, str], optional, default None
1382
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1383
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1384
- shared_memory: int, optional
1385
- Shared memory size (in MiB) required for this step
1386
- port: int, optional
1387
- Port number to specify in the Kubernetes job object
1237
+ tmpfs_path : str, optional, default None
1238
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1239
+ inferentia : int, default 0
1240
+ Number of Inferentia chips required for this step.
1241
+ trainium : int, default None
1242
+ Alias for inferentia. Use only one of the two.
1243
+ efa : int, default 0
1244
+ Number of elastic fabric adapter network devices to attach to container
1245
+ ephemeral_storage: int, default None
1246
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1247
+ This is only relevant for Fargate compute environments
1248
+ log_driver: str, optional, default None
1249
+ The log driver to use for the Amazon ECS container.
1250
+ log_options: List[str], optional, default None
1251
+ List of strings containing options for the chosen log driver. The configurable values
1252
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1253
+ Example usage: ["awslogs-group:aws/batch/job"]
1388
1254
  """
1389
1255
  ...
1390
1256
 
1391
1257
  @typing.overload
1392
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1393
- """
1394
- Specifies the PyPI packages for all steps of the flow.
1395
-
1396
- Use `@pypi_base` to set common packages required by all
1397
- steps and use `@pypi` to specify step-specific overrides.
1398
- Parameters
1399
- ----------
1400
- packages : Dict[str, str], default: {}
1401
- Packages to use for this flow. The key is the name of the package
1402
- and the value is the version to use.
1403
- python : str, optional, default: None
1404
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to the container.
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200).
+ This is only relevant for Fargate compute environments.
+ log_driver : str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options : List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...
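For orientation, a minimal sketch of how the `@batch` step decorator above is typically applied; the flow name and resource values are illustrative assumptions, not taken from this diff:

```python
# Hedged usage sketch for @batch (flow name and resource values are illustrative).
from metaflow import FlowSpec, batch, step

class BatchDemoFlow(FlowSpec):

    @batch(cpu=2, memory=8192)  # run this step on AWS Batch: 2 vCPUs, 8192 MB memory
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```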

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorator. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
+ Specifies a timeout for your step.

- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style URL or relative path from root level.
- When it's specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
+ This decorator is useful if this step may hang indefinitely.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed, and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in the parameters are added together, so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies a timeout for your step.

- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed, and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in the parameters are added together, so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
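As the docstring notes, `@timeout` composes with `@retry` and `@catch`; a small sketch under those assumptions (flow name and time budget are illustrative):

```python
# Hedged sketch: the timeout values are summed (here 1 hour + 1 minute),
# a timed-out attempt is retried once, and a final failure is caught.
from metaflow import FlowSpec, catch, retry, step, timeout

class TimeoutDemoFlow(FlowSpec):

    @catch(var="slow_step_error")  # stores the exception instead of failing the run
    @retry(times=1)
    @timeout(hours=1, minutes=1)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutDemoFlow()
```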

@@ -1631,6 +1483,112 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
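A quick illustration of the `@schedule` flow decorator; the cron expression and timezone are hypothetical, and the schedule only takes effect once the flow is deployed to a production scheduler:

```python
# Hedged sketch: run nightly at 06:00 in the given IANA timezone
# (the timezone argument is honored only on Argo Workflows, per the docstring).
from metaflow import FlowSpec, schedule, step

@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```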
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
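A sketch of `@project`; the project name is hypothetical:

```python
# Hedged sketch: flows decorated with the same @project name share one
# project-specific namespace on the production scheduler.
from metaflow import FlowSpec, project, step

@project(name="demo_project")  # lowercase alphanumerics and underscores only
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```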
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
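A sketch of `@pypi_base` with a step-level `@pypi` override, as the docstring suggests; package names and version pins are illustrative:

```python
# Hedged sketch: flow-wide pins via @pypi_base, one step adding its own package.
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(packages={"requests": "2.31.0"}, python="3.10.9")
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @pypi(packages={"pandas": "2.1.4"})  # step-specific addition
    @step
    def end(self):
        import pandas  # resolved inside the step's isolated environment
        print(pandas.__version__)

if __name__ == "__main__":
    PypiDemoFlow()
```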
+
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1673,6 +1631,97 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style URL or relative path from root level.
+ When it's specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
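A sketch of gating a flow on an S3 key when targeting Airflow; the bucket and key are hypothetical, the remaining parameters fall back to the defaults documented above, and the top-level import is assumed to mirror this stub file:

```python
# Hedged sketch: the sensor is attached before `start` when the flow is
# compiled with `airflow create`; it has no effect on local runs.
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(bucket_key="s3://example-bucket/daily/input.csv")
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```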
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
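A sketch of `@conda_base` usage; package pins and Python version are illustrative:

```python
# Hedged sketch: one Conda environment shared by every step of the flow.
from metaflow import FlowSpec, conda_base, step

@conda_base(packages={"numpy": "1.26.4"}, python="3.10.9")
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import numpy  # resolved from the flow-wide Conda environment
        self.value = numpy.pi
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```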
+
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1776,55 +1825,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.