metaflow-stubs 2.12.15__py2.py3-none-any.whl → 2.12.17__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +479 -479
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +22 -22
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +2 -2
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +7 -7
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  67. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  87. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  112. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +3 -3
  135. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  136. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  137. metaflow-stubs/runner/nbrun.pyi +2 -2
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +4 -4
  141. metaflow-stubs/system/system_logger.pyi +3 -3
  142. metaflow-stubs/system/system_monitor.pyi +3 -3
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.15.dist-info → metaflow_stubs-2.12.17.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.17.dist-info/RECORD +150 -0
  148. metaflow_stubs-2.12.15.dist-info/RECORD +0 -150
  149. {metaflow_stubs-2.12.15.dist-info → metaflow_stubs-2.12.17.dist-info}/WHEEL +0 -0
  150. {metaflow_stubs-2.12.15.dist-info → metaflow_stubs-2.12.17.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.15 #
-# Generated on 2024-08-22T20:18:55.258441 #
+# MF version: 2.12.17 #
+# Generated on 2024-08-27T00:52:58.058650 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
+    import metaflow.datastore.inputs
+    import metaflow._vendor.click.types
+    import metaflow.plugins.datatools.s3.s3
     import metaflow.runner.metaflow_runner
+    import typing
     import metaflow.parameters
+    import metaflow.metaflow_current
+    import metaflow.events
     import io
     import metaflow.flowspec
-    import metaflow.client.core
-    import metaflow.events
     import datetime
-    import metaflow.datastore.inputs
-    import typing
-    import metaflow.plugins.datatools.s3.s3
-    import metaflow._vendor.click.types
-    import metaflow.metaflow_current
+    import metaflow.client.core

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

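For readers of the stub diff: `FlowSpecDerived` and `StepFlag` above are the typing anchors for every step-decorator overload in this file — one overload accepts the step function itself (bare `@decorator` use), another accepts keyword arguments only (`@decorator(...)` use). A minimal sketch, not part of the diff, of the two spellings these overloads let a type checker accept, using `@retry`; the flow and step names are hypothetical:

from metaflow import FlowSpec, retry, step

class TwoSpellingsFlow(FlowSpec):  # hypothetical flow name

    @retry                 # bare form: matches the overload taking the step function
    @step
    def start(self):
        self.next(self.end)

    @retry(times=2)        # parametrized form: matches the keyword-only overload
    @step
    def end(self):
        pass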
@@ -727,65 +727,117 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+@typing.overload
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on Kubernetes.
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    disk : int, default 10240
-        Disk size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on Kubernetes. If not specified, and
-        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
-        If given, the imagePullPolicy to be applied to the Docker image of the step.
-    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-        Kubernetes service account to use when launching pod in Kubernetes.
-    secrets : List[str], optional, default None
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
+    """
+    ...
+
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+    """
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+    Parameters
+    ----------
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
+    """
+    ...
+
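To make the additive duration rule above concrete, a minimal usage sketch — not part of the package diff; the flow name is hypothetical and the step body is a stand-in:

import time

from metaflow import FlowSpec, step, timeout

class SlowStepFlow(FlowSpec):  # hypothetical flow name

    # Durations are summed: 60 seconds + 1 hour = effective timeout of 1 hour 1 minute.
    @timeout(seconds=60, hours=1)
    @step
    def start(self):
        time.sleep(5)  # stand-in for work that could hang indefinitely
        self.next(self.end)

    @step
    def end(self):
        pass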
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

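A minimal sketch of the flow-level/step-level split the `@conda` docstring describes — not part of the diff; the flow name and version pins are illustrative:

from metaflow import FlowSpec, conda, conda_base, step

# @conda_base sets packages for every step; @conda adds step-specific overrides.
@conda_base(python="3.10.4", packages={"pandas": "2.1.0"})  # illustrative pins
class CondaFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import pandas  # resolved from the flow-level environment
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.4.0"})  # step-specific addition
    @step
    def train(self):
        import sklearn  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass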
@@ -843,35 +895,244 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+    """
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
+    """
+    ...
+
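A minimal sketch of requesting Batch resources through this decorator — not part of the diff; the flow name and sizes are illustrative:

from metaflow import FlowSpec, batch, step

class BatchFlow(FlowSpec):  # hypothetical flow name

    # If @resources were also present, the maximum of the two requests would win.
    @batch(cpu=2, memory=8192, use_tmpfs=True, tmpfs_size=1024)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass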
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    """
+    ...
+
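Similarly, a minimal `@kubernetes` sketch — not part of the diff; the flow name, PVC name, and mount path are illustrative:

from metaflow import FlowSpec, kubernetes, step

class K8sFlow(FlowSpec):  # hypothetical flow name

    # persistent_volume_claims maps claim names to mount paths, per the docstring.
    @kubernetes(cpu=4, memory=16384, persistent_volume_claims={"example-pvc": "/mnt/data"})
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass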
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
     Specifies that the step will success under all circumstances.

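A minimal sketch of inspecting the artifact `@catch` creates — not part of the diff; flow, step, and artifact names are illustrative:

from metaflow import FlowSpec, catch, step

class CatchFlow(FlowSpec):  # hypothetical flow name

    @catch(var="compute_failed")  # illustrative artifact name
    @step
    def start(self):
        self.result = 1 / 0  # raises; @catch records the exception instead of failing the run
        self.next(self.end)

    @step
    def end(self):
        # A truthy artifact signals the happy-path artifacts (self.result) are missing.
        if getattr(self, "compute_failed", None):
            print("start failed:", self.compute_failed)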
@@ -969,109 +1230,35 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...

 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.

     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.

     Parameters
     ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

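A minimal sketch of the injection the `@secrets` stub describes — not part of the diff; the secret source string and the injected environment key are illustrative and depend on the configured secrets backend:

import os

from metaflow import FlowSpec, secrets, step

class SecretsFlow(FlowSpec):  # hypothetical flow name

    # Each source spec tells the configured backend which secret to fetch.
    @secrets(sources=["example-db-credentials"])  # illustrative secret id
    @step
    def start(self):
        # Keys stored in the secret surface as environment variables here.
        password = os.environ.get("DB_PASSWORD")  # illustrative key name
        self.next(self.end)

    @step
    def end(self):
        pass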
@@ -1126,153 +1313,6 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

-@typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    ...
-
 @typing.overload
 def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -1323,52 +1363,67 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1323
1363
  ...
 
  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
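The `@parallel` stubs added above are only the prototype that plugins specialize, but the decorator can be used directly for gang-scheduled steps. A sketch under the assumption that this Metaflow version exposes `num_parallel` and `current.parallel` (both exist in recent releases, but verify against your install):

```python
from metaflow import FlowSpec, current, parallel, step


class ParallelDemoFlow(FlowSpec):
    @step
    def start(self):
        # num_parallel fans train out as one gang of four workers that
        # run simultaneously and can discover each other.
        self.next(self.train, num_parallel=4)

    @parallel
    @step
    def train(self):
        print("worker", current.parallel.node_index,
              "of", current.parallel.num_nodes)
        self.next(self.join)

    @step
    def join(self, inputs):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ParallelDemoFlow()
```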
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
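`@environment` is the simplest of these decorators: the mapping is exported into the process environment before the step body runs, including on remote compute. A minimal sketch (the variable name is illustrative):

```python
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):
    # TOKENIZERS_PARALLELISM is set before start() executes, so libraries
    # imported inside the step see it at import time.
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```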
 
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the Conda environment for all steps of the flow.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
  packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  libraries : Dict[str, str], default {}
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1376,31 +1431,25 @@ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, s
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
  disabled : bool, default False
- If set to True, disables @conda.
+ If set to True, disables Conda.
  """
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the Conda environment for the step.
+ Specifies the Conda environment for all steps of the flow.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
 
  Parameters
  ----------
  packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
  libraries : Dict[str, str], default {}
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1408,56 +1457,49 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
  disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ If set to True, disables Conda.
  """
  ...
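The division of labor the docstring describes, `@conda_base` for flow-wide packages and `@conda` for per-step additions, looks like this in practice (version pins are illustrative):

```python
from metaflow import FlowSpec, conda, conda_base, step


# Every step resolves against this shared base environment.
@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})
class CondaDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.train)

    # Step-specific addition layered on top of the base environment.
    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def train(self):
        import numpy
        import sklearn
        print(numpy.__version__, sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```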
 
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of the sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it's specified as a full s3:// URL, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
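Since `@airflow_s3_key_sensor` only takes effect when the flow is compiled with `airflow create`, here is a hedged flow-level sketch; the arguments mirror the defaults documented above, and the bucket key is hypothetical:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Blocks the start step until the key exists; compiled into an Airflow
# S3KeySensor task by `python sensor_demo.py airflow create dag.py`.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_daily_drop",
    description="Wait for the upstream daily export",
    bucket_key="s3://example-bucket/exports/daily.parquet",  # hypothetical key
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorDemoFlow()
```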
 
@@ -1564,52 +1606,70 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
 
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies what flows belong to the same project.
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
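`@project` affects deployment-time namespacing rather than local runs. A minimal sketch (the project name is illustrative): every flow decorated with the same name shares one project namespace on the production scheduler.

```python
from metaflow import FlowSpec, project, step


# Flows deployed with @project(name="fraud_detection") share a namespace,
# so branches and production deployments stay isolated per project.
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```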
1589
1626
 
1590
1627
  @typing.overload
1591
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1628
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1629
+ """
1630
+ Specifies the times when the flow should be run when running on a
1631
+ production scheduler.
1632
+
1633
+ Parameters
1634
+ ----------
1635
+ hourly : bool, default False
1636
+ Run the workflow hourly.
1637
+ daily : bool, default True
1638
+ Run the workflow daily.
1639
+ weekly : bool, default False
1640
+ Run the workflow weekly.
1641
+ cron : str, optional, default None
1642
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1643
+ specified by this expression.
1644
+ timezone : str, optional, default None
1645
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1646
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1647
+ """
1592
1648
  ...
1593
1649
 
1594
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1650
+ @typing.overload
1651
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1652
+ ...
1653
+
1654
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1595
1655
  """
1596
- Specifies the Conda environment for all steps of the flow.
1597
-
1598
- Use `@conda_base` to set common libraries required by all
1599
- steps and use `@conda` to specify step-specific additions.
1656
+ Specifies the times when the flow should be run when running on a
1657
+ production scheduler.
1600
1658
 
1601
1659
  Parameters
1602
1660
  ----------
1603
- packages : Dict[str, str], default {}
1604
- Packages to use for this flow. The key is the name of the package
1605
- and the value is the version to use.
1606
- libraries : Dict[str, str], default {}
1607
- Supported for backward compatibility. When used with packages, packages will take precedence.
1608
- python : str, optional, default None
1609
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1610
- that the version used will correspond to the version of the Python interpreter used to start the run.
1611
- disabled : bool, default False
1612
- If set to True, disables Conda.
1661
+ hourly : bool, default False
1662
+ Run the workflow hourly.
1663
+ daily : bool, default True
1664
+ Run the workflow daily.
1665
+ weekly : bool, default False
1666
+ Run the workflow weekly.
1667
+ cron : str, optional, default None
1668
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1669
+ specified by this expression.
1670
+ timezone : str, optional, default None
1671
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1672
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1613
1673
  """
1614
1674
  ...
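`@schedule` is likewise a no-op locally and applies once the flow is deployed to a production scheduler. A sketch assuming an Argo Workflows deployment (the cron string uses a standard five-field expression; per the docstring, `timezone` is honored only on Argo, and the linked EventBridge syntax applies elsewhere):

```python
from metaflow import FlowSpec, schedule, step


# Runs every day at 02:30 in the given IANA timezone once deployed,
# e.g. via `python nightly.py argo-workflows create`.
@schedule(cron="30 2 * * *", timezone="America/New_York")
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```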
 
@@ -1750,66 +1810,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorator. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of the sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When it's specified as a full s3:// URL, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """