ob-metaflow-stubs 3.2__py2.py3-none-any.whl → 3.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (136)
  1. metaflow-stubs/__init__.pyi +482 -482
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +5 -5
  5. metaflow-stubs/client/core.pyi +4 -4
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +3 -3
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +5 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +6 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +58 -0
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +4 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/logs_cli.pyi +45 -0
  109. metaflow-stubs/plugins/package_cli.pyi +2 -2
  110. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/perimeters.pyi +2 -2
  112. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  113. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  116. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  119. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  125. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  126. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  127. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  128. metaflow-stubs/procpoll.pyi +2 -2
  129. metaflow-stubs/profilers/__init__.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/tagging_util.pyi +2 -2
  132. {ob_metaflow_stubs-3.2.dist-info → ob_metaflow_stubs-3.4.dist-info}/METADATA +1 -1
  133. ob_metaflow_stubs-3.4.dist-info/RECORD +136 -0
  134. ob_metaflow_stubs-3.2.dist-info/RECORD +0 -134
  135. {ob_metaflow_stubs-3.2.dist-info → ob_metaflow_stubs-3.4.dist-info}/WHEEL +0 -0
  136. {ob_metaflow_stubs-3.2.dist-info → ob_metaflow_stubs-3.4.dist-info}/top_level.txt +0 -0
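The diff below mostly reorders and regenerates the step-decorator stubs (`@conda`, `@kubernetes`, `@batch`, `@retry`, `@environment`, `@timeout`, `@pypi`, `@secrets`) in `metaflow-stubs/__init__.pyi`. For orientation only, here is a minimal, hypothetical flow that uses a few of the decorators whose stubs are touched; the flow name, step names, and pinned package version are illustrative, not taken from the package, and the `@conda` step assumes a working Conda-enabled Metaflow setup.

```python
from metaflow import FlowSpec, step, retry, timeout, conda, environment


class ExampleFlow(FlowSpec):
    # Retry transient failures and cap how long the step may hang,
    # per the @retry / @timeout parameters documented in the stubs below.
    @retry(times=2, minutes_between_retries=1)
    @timeout(minutes=10)
    @step
    def start(self):
        self.message = "hello"
        self.next(self.train)

    # Step-specific Conda packages and environment variables;
    # the numpy pin is illustrative.
    @conda(packages={"numpy": "1.26.4"}, python="3.10")
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def train(self):
        import numpy as np  # provided by the @conda environment
        self.mean = float(np.mean([1, 2, 3]))
        self.next(self.end)

    @step
    def end(self):
        print(self.message, self.mean)


if __name__ == "__main__":
    # Run with: python example_flow.py run
    ExampleFlow()
```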
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.14.1+ob(v1) #
- # Generated on 2024-05-07T00:05:40.513974 #
+ # MF version: 2.11.15.2+ob(v1) #
+ # Generated on 2024-05-17T19:44:44.623630 #
  ##################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
+ import datetime
  import io
+ import metaflow.events
  import metaflow.parameters
- import datetime
  import typing
- import metaflow.events
- import metaflow.plugins.datatools.s3.s3
- import metaflow.datastore.inputs
  import metaflow.metaflow_current
  import metaflow.client.core
  import metaflow._vendor.click.types
+ import metaflow.plugins.datatools.s3.s3
+ import metaflow.datastore.inputs
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
@@ -726,59 +726,118 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...
 
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the Conda environment for the step.
 
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the Conda environment for the step.
 
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
 
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...
 
@@ -860,86 +919,149 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...
 
  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...
 
  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage: int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...
 
@@ -992,168 +1114,87 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- """
- ...
-
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
@@ -1209,182 +1250,180 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1209
1250
  ...
1210
1251
 
1211
1252
  @typing.overload
1212
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1253
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1213
1254
  """
1214
- Specifies secrets to be retrieved and injected as environment variables prior to
1215
- the execution of a step.
1255
+ Specifies a timeout for your step.
1256
+
1257
+ This decorator is useful if this step may hang indefinitely.
1258
+
1259
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1260
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1261
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1262
+
1263
+ Note that all the values specified in parameters are added together so if you specify
1264
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1216
1265
 
1217
1266
  Parameters
1218
1267
  ----------
1219
- sources : List[Union[str, Dict[str, Any]]], default: []
1220
- List of secret specs, defining how the secrets are to be retrieved
1268
+ seconds : int, default 0
1269
+ Number of seconds to wait prior to timing out.
1270
+ minutes : int, default 0
1271
+ Number of minutes to wait prior to timing out.
1272
+ hours : int, default 0
1273
+ Number of hours to wait prior to timing out.
1221
1274
  """
1222
1275
  ...
1223
1276
 
1224
1277
  @typing.overload
1225
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1278
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1226
1279
  ...
1227
1280
 
1228
1281
  @typing.overload
1229
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1282
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1230
1283
  ...
1231
1284
 
1232
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1285
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1233
1286
  """
1234
- Specifies secrets to be retrieved and injected as environment variables prior to
1235
- the execution of a step.
1287
+ Specifies a timeout for your step.
1288
+
1289
+ This decorator is useful if this step may hang indefinitely.
1290
+
1291
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1292
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1293
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1294
+
1295
+ Note that all the values specified in parameters are added together so if you specify
1296
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1236
1297
 
1237
1298
  Parameters
1238
1299
  ----------
1239
- sources : List[Union[str, Dict[str, Any]]], default: []
1240
- List of secret specs, defining how the secrets are to be retrieved
1300
+ seconds : int, default 0
1301
+ Number of seconds to wait prior to timing out.
1302
+ minutes : int, default 0
1303
+ Number of minutes to wait prior to timing out.
1304
+ hours : int, default 0
1305
+ Number of hours to wait prior to timing out.
1241
1306
  """
1242
1307
  ...
1243
1308
 
1244
1309
  @typing.overload
1245
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1310
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1246
1311
  """
1247
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1312
+ Specifies the PyPI packages for the step.
1313
+
1314
+ Information in this decorator will augment any
1315
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1316
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage: int, default None
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example usage: ["awslogs-group:aws/batch/job"]
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
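Editor's note: to make the new `@pypi` step decorator concrete, here is a minimal sketch of how it is typically combined with `@pypi_base` (illustrative only; the flow name and package pins below are hypothetical and are not part of this diff):

from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(python="3.10.4", packages={"requests": "2.31.0"})  # flow-wide packages
class PyPIExampleFlow(FlowSpec):

    @pypi(packages={"pandas": "2.1.0"})  # step-specific addition/override
    @step
    def start(self):
        import pandas  # resolved from this step's PyPI environment
        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        import requests  # inherited from the @pypi_base environment
        print(requests.__version__)

if __name__ == "__main__":
    PyPIExampleFlow()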
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
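Editor's note: a minimal sketch of the `@secrets` decorator documented above (the secret source name and the injected environment variable are hypothetical; how a spec resolves depends on the configured secrets backend):

import os
from metaflow import FlowSpec, step, secrets

class SecretsExampleFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        # Keys from the secret are injected as environment variables
        # before the step body runs.
        print("db user:", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsExampleFlow()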
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
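Editor's note: for reference, a flow-level `@pypi_base` pin as documented above might look like this sketch (the Python version and package pins are illustrative assumptions):

from metaflow import FlowSpec, step, pypi_base

@pypi_base(python="3.10.4", packages={"numpy": "1.26.0", "requests": "2.31.0"})
class PinnedFlow(FlowSpec):

    @step
    def start(self):
        import numpy  # resolved from the flow-level environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PinnedFlow()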
 
@@ -1491,91 +1530,70 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies what flows belong to the same project.

- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...
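Editor's note: a minimal sketch of the `@project` flow decorator shown above (the project name is a hypothetical example that satisfies the lowercase-and-underscores rule):

from metaflow import FlowSpec, step, project

@project(name="fraud_detection")  # groups deployments of related flows under one namespace
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()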
 
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
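Editor's note: a sketch of the `@conda_base` usage documented above (the Python version and package pins are illustrative assumptions):

from metaflow import FlowSpec, step, conda_base

@conda_base(python="3.9.16", packages={"numpy": "1.24.3", "pandas": "1.5.3"})
class CondaPinnedFlow(FlowSpec):

    @step
    def start(self):
        import pandas  # resolved from the Conda environment defined above
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaPinnedFlow()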
1581
1599
 
@@ -1674,6 +1692,55 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
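Editor's note: a sketch of the `@schedule` decorator added above; the cron expression and timezone are illustrative, and the schedule only takes effect once the flow is deployed to a production scheduler:

from metaflow import FlowSpec, step, schedule

@schedule(cron="0 6 * * *", timezone="Europe/London")  # every day at 06:00
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyReportFlow()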
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1716,24 +1783,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
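Editor's note: a hedged sketch of `@airflow_external_task_sensor`, filling in every parameter from the stub signature above with illustrative values (the DAG id, task ids, and timings are hypothetical):

from datetime import timedelta
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_upstream",
    description="Wait for the upstream DAG before starting this flow",
    external_dag_id="upstream_dag",        # hypothetical DAG id
    external_task_ids=["end"],
    allowed_states=["success"],
    failed_states=["failed"],
    execution_delta=timedelta(hours=1),
    check_existence=True,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()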
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1776,55 +1825,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
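Editor's note: similarly, a hedged sketch of `@airflow_s3_key_sensor` with illustrative values for every parameter in the stub signature above (the bucket, key, and connection id are hypothetical):

from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_input_file",
    description="Block until the daily input file lands in S3",
    bucket_key="incoming/data.csv",        # hypothetical key
    bucket_name="my-example-bucket",       # hypothetical bucket
    wildcard_match=False,
    aws_conn_id="aws_default",
    verify=True,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()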
 
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.