metaflow-stubs 2.12.0__py2.py3-none-any.whl → 2.12.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. metaflow-stubs/__init__.pyi +532 -532
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +7 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +5 -5
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +3 -3
  110. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  111. metaflow-stubs/plugins/package_cli.pyi +2 -2
  112. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +2 -2
  132. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  133. metaflow-stubs/runner/nbrun.pyi +2 -2
  134. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +2 -2
  137. metaflow-stubs/version.pyi +2 -2
  138. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.1.dist-info}/METADATA +2 -2
  139. metaflow_stubs-2.12.1.dist-info/RECORD +142 -0
  140. metaflow_stubs-2.12.0.dist-info/RECORD +0 -142
  141. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.1.dist-info}/WHEEL +0 -0
  142. {metaflow_stubs-2.12.0.dist-info → metaflow_stubs-2.12.1.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.0 #
4
- # Generated on 2024-05-28T09:55:27.155006 #
3
+ # MF version: 2.12.1 #
4
+ # Generated on 2024-06-03T17:47:01.111972 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.parameters
12
- import metaflow._vendor.click.types
13
- import metaflow.datastore.inputs
14
- import datetime
15
- import metaflow.client.core
16
11
  import metaflow.plugins.datatools.s3.s3
17
- import typing
18
- import metaflow.metaflow_current
12
+ import metaflow._vendor.click.types
19
13
  import io
14
+ import typing
20
15
  import metaflow.events
16
+ import datetime
17
+ import metaflow.client.core
18
+ import metaflow.datastore.inputs
19
+ import metaflow.parameters
21
20
  import metaflow.runner.metaflow_runner
21
+ import metaflow.metaflow_current
22
22
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
23
23
  StepFlag = typing.NewType("StepFlag", bool)
24
24
 
@@ -727,79 +727,82 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
727
727
  ...
728
728
 
729
729
  @typing.overload
730
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
731
  """
732
- Specifies the resources needed when executing this step.
733
-
734
- Use `@resources` to specify the resource requirements
735
- independently of the specific compute layer (`@batch`, `@kubernetes`).
732
+ Specifies the PyPI packages for the step.
736
733
 
737
- You can choose the compute layer on the command line by executing e.g.
738
- ```
739
- python myflow.py run --with batch
740
- ```
741
- or
742
- ```
743
- python myflow.py run --with kubernetes
744
- ```
745
- which executes the flow on the desired system using the
746
- requirements specified in `@resources`.
734
+ Information in this decorator will augment any
735
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
736
+ you can use `@pypi_base` to set packages required by all
737
+ steps and use `@pypi` to specify step-specific overrides.
747
738
 
748
739
  Parameters
749
740
  ----------
750
- cpu : int, default 1
751
- Number of CPUs required for this step.
752
- gpu : int, default 0
753
- Number of GPUs required for this step.
754
- disk : int, optional, default None
755
- Disk size (in MB) required for this step. Only applies on Kubernetes.
756
- memory : int, default 4096
757
- Memory size (in MB) required for this step.
758
- shared_memory : int, optional, default None
759
- The value for the size (in MiB) of the /dev/shm volume for this step.
760
- This parameter maps to the `--shm-size` option in Docker.
741
+ packages : Dict[str, str], default: {}
742
+ Packages to use for this step. The key is the name of the package
743
+ and the value is the version to use.
744
+ python : str, optional, default: None
745
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
746
+ that the version used will correspond to the version of the Python interpreter used to start the run.
761
747
  """
762
748
  ...
763
749
 
764
750
  @typing.overload
765
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
751
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
766
752
  ...
767
753
 
768
754
  @typing.overload
769
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
755
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
770
756
  ...
771
757
 
772
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
758
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
773
759
  """
774
- Specifies the resources needed when executing this step.
760
+ Specifies the PyPI packages for the step.
775
761
 
776
- Use `@resources` to specify the resource requirements
777
- independently of the specific compute layer (`@batch`, `@kubernetes`).
762
+ Information in this decorator will augment any
763
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
764
+ you can use `@pypi_base` to set packages required by all
765
+ steps and use `@pypi` to specify step-specific overrides.
778
766
 
779
- You can choose the compute layer on the command line by executing e.g.
780
- ```
781
- python myflow.py run --with batch
782
- ```
783
- or
784
- ```
785
- python myflow.py run --with kubernetes
786
- ```
787
- which executes the flow on the desired system using the
788
- requirements specified in `@resources`.
767
+ Parameters
768
+ ----------
769
+ packages : Dict[str, str], default: {}
770
+ Packages to use for this step. The key is the name of the package
771
+ and the value is the version to use.
772
+ python : str, optional, default: None
773
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
774
+ that the version used will correspond to the version of the Python interpreter used to start the run.
775
+ """
776
+ ...
777
+
778
+ @typing.overload
779
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
780
+ """
781
+ Specifies environment variables to be set prior to the execution of a step.
789
782
 
790
783
  Parameters
791
784
  ----------
792
- cpu : int, default 1
793
- Number of CPUs required for this step.
794
- gpu : int, default 0
795
- Number of GPUs required for this step.
796
- disk : int, optional, default None
797
- Disk size (in MB) required for this step. Only applies on Kubernetes.
798
- memory : int, default 4096
799
- Memory size (in MB) required for this step.
800
- shared_memory : int, optional, default None
801
- The value for the size (in MiB) of the /dev/shm volume for this step.
802
- This parameter maps to the `--shm-size` option in Docker.
785
+ vars : Dict[str, str], default {}
786
+ Dictionary of environment variables to set.
787
+ """
788
+ ...
789
+
790
+ @typing.overload
791
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
792
+ ...
793
+
794
+ @typing.overload
795
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
796
+ ...
797
+
798
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
799
+ """
800
+ Specifies environment variables to be set prior to the execution of a step.
801
+
802
+ Parameters
803
+ ----------
804
+ vars : Dict[str, str], default {}
805
+ Dictionary of environment variables to set.
803
806
  """
804
807
  ...
805
808
 
@@ -863,270 +866,187 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
863
866
  ...
864
867
 
865
868
  @typing.overload
866
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
869
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
867
870
  """
868
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
871
+ Creates a human-readable report, a Metaflow Card, after this step completes.
872
+
873
+ Note that you may add multiple `@card` decorators in a step with different parameters.
869
874
 
870
875
  Parameters
871
876
  ----------
872
- cpu : int, default 1
873
- Number of CPUs required for this step. If `@resources` is
874
- also present, the maximum value from all decorators is used.
875
- gpu : int, default 0
876
- Number of GPUs required for this step. If `@resources` is
877
- also present, the maximum value from all decorators is used.
878
- memory : int, default 4096
879
- Memory size (in MB) required for this step. If
880
- `@resources` is also present, the maximum value from all decorators is
881
- used.
882
- image : str, optional, default None
883
- Docker image to use when launching on AWS Batch. If not specified, and
884
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
885
- not, a default Docker image mapping to the current version of Python is used.
886
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
887
- AWS Batch Job Queue to submit the job to.
888
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
889
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
890
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
891
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
892
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
893
- shared_memory : int, optional, default None
894
- The value for the size (in MiB) of the /dev/shm volume for this step.
895
- This parameter maps to the `--shm-size` option in Docker.
896
- max_swap : int, optional, default None
897
- The total amount of swap memory (in MiB) a container can use for this
898
- step. This parameter is translated to the `--memory-swap` option in
899
- Docker where the value is the sum of the container memory plus the
900
- `max_swap` value.
901
- swappiness : int, optional, default None
902
- This allows you to tune memory swappiness behavior for this step.
903
- A swappiness value of 0 causes swapping not to happen unless absolutely
904
- necessary. A swappiness value of 100 causes pages to be swapped very
905
- aggressively. Accepted values are whole numbers between 0 and 100.
906
- use_tmpfs : bool, default False
907
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
908
- not available on Fargate compute environments
909
- tmpfs_tempdir : bool, default True
910
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
911
- tmpfs_size : int, optional, default None
912
- The value for the size (in MiB) of the tmpfs mount for this step.
913
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
914
- memory allocated for this step.
915
- tmpfs_path : str, optional, default None
916
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
917
- inferentia : int, default 0
918
- Number of Inferentia chips required for this step.
919
- trainium : int, default None
920
- Alias for inferentia. Use only one of the two.
921
- efa : int, default 0
922
- Number of elastic fabric adapter network devices to attach to container
923
- ephemeral_storage : int, default None
924
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
925
- This is only relevant for Fargate compute environments
926
- log_driver: str, optional, default None
927
- The log driver to use for the Amazon ECS container.
928
- log_options: List[str], optional, default None
929
- List of strings containing options for the chosen log driver. The configurable values
930
- depend on the `log driver` chosen. Validation of these options is not supported yet.
931
- Example: [`awslogs-group:aws/batch/job`]
877
+ type : str, default 'default'
878
+ Card type.
879
+ id : str, optional, default None
880
+ If multiple cards are present, use this id to identify this card.
881
+ options : Dict[str, Any], default {}
882
+ Options passed to the card. The contents depend on the card type.
883
+ timeout : int, default 45
884
+ Interrupt reporting if it takes more than this many seconds.
885
+
886
+
932
887
  """
933
888
  ...
934
889
 
935
890
  @typing.overload
936
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
891
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
892
  ...
938
893
 
939
894
  @typing.overload
940
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
895
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
896
  ...
942
897
 
943
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
898
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
944
899
  """
945
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
900
+ Creates a human-readable report, a Metaflow Card, after this step completes.
901
+
902
+ Note that you may add multiple `@card` decorators in a step with different parameters.
903
+
904
+ Parameters
905
+ ----------
906
+ type : str, default 'default'
907
+ Card type.
908
+ id : str, optional, default None
909
+ If multiple cards are present, use this id to identify this card.
910
+ options : Dict[str, Any], default {}
911
+ Options passed to the card. The contents depend on the card type.
912
+ timeout : int, default 45
913
+ Interrupt reporting if it takes more than this many seconds.
914
+
915
+
916
+ """
917
+ ...
918
+
919
+ @typing.overload
920
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
921
+ """
922
+ Specifies the resources needed when executing this step.
923
+
924
+ Use `@resources` to specify the resource requirements
925
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
926
+
927
+ You can choose the compute layer on the command line by executing e.g.
928
+ ```
929
+ python myflow.py run --with batch
930
+ ```
931
+ or
932
+ ```
933
+ python myflow.py run --with kubernetes
934
+ ```
935
+ which executes the flow on the desired system using the
936
+ requirements specified in `@resources`.
946
937
 
947
938
  Parameters
948
939
  ----------
949
940
  cpu : int, default 1
950
- Number of CPUs required for this step. If `@resources` is
951
- also present, the maximum value from all decorators is used.
941
+ Number of CPUs required for this step.
952
942
  gpu : int, default 0
953
- Number of GPUs required for this step. If `@resources` is
954
- also present, the maximum value from all decorators is used.
943
+ Number of GPUs required for this step.
944
+ disk : int, optional, default None
945
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
955
946
  memory : int, default 4096
956
- Memory size (in MB) required for this step. If
957
- `@resources` is also present, the maximum value from all decorators is
958
- used.
959
- image : str, optional, default None
960
- Docker image to use when launching on AWS Batch. If not specified, and
961
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
962
- not, a default Docker image mapping to the current version of Python is used.
963
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
964
- AWS Batch Job Queue to submit the job to.
965
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
966
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
967
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
968
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
969
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
947
+ Memory size (in MB) required for this step.
970
948
  shared_memory : int, optional, default None
971
949
  The value for the size (in MiB) of the /dev/shm volume for this step.
972
950
  This parameter maps to the `--shm-size` option in Docker.
973
- max_swap : int, optional, default None
974
- The total amount of swap memory (in MiB) a container can use for this
975
- step. This parameter is translated to the `--memory-swap` option in
976
- Docker where the value is the sum of the container memory plus the
977
- `max_swap` value.
978
- swappiness : int, optional, default None
979
- This allows you to tune memory swappiness behavior for this step.
980
- A swappiness value of 0 causes swapping not to happen unless absolutely
981
- necessary. A swappiness value of 100 causes pages to be swapped very
982
- aggressively. Accepted values are whole numbers between 0 and 100.
983
- use_tmpfs : bool, default False
984
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
985
- not available on Fargate compute environments
986
- tmpfs_tempdir : bool, default True
987
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
988
- tmpfs_size : int, optional, default None
989
- The value for the size (in MiB) of the tmpfs mount for this step.
990
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
991
- memory allocated for this step.
992
- tmpfs_path : str, optional, default None
993
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
994
- inferentia : int, default 0
995
- Number of Inferentia chips required for this step.
996
- trainium : int, default None
997
- Alias for inferentia. Use only one of the two.
998
- efa : int, default 0
999
- Number of elastic fabric adapter network devices to attach to container
1000
- ephemeral_storage : int, default None
1001
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1002
- This is only relevant for Fargate compute environments
1003
- log_driver: str, optional, default None
1004
- The log driver to use for the Amazon ECS container.
1005
- log_options: List[str], optional, default None
1006
- List of strings containing options for the chosen log driver. The configurable values
1007
- depend on the `log driver` chosen. Validation of these options is not supported yet.
1008
- Example: [`awslogs-group:aws/batch/job`]
1009
- """
1010
- ...
1011
-
1012
- @typing.overload
1013
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1014
- """
1015
- Specifies a timeout for your step.
1016
-
1017
- This decorator is useful if this step may hang indefinitely.
1018
-
1019
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1020
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1021
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1022
-
1023
- Note that all the values specified in parameters are added together so if you specify
1024
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1025
-
1026
- Parameters
1027
- ----------
1028
- seconds : int, default 0
1029
- Number of seconds to wait prior to timing out.
1030
- minutes : int, default 0
1031
- Number of minutes to wait prior to timing out.
1032
- hours : int, default 0
1033
- Number of hours to wait prior to timing out.
1034
951
  """
1035
952
  ...
1036
953
 
1037
954
  @typing.overload
1038
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
955
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1039
956
  ...
1040
957
 
1041
958
  @typing.overload
1042
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
959
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1043
960
  ...
1044
961
 
1045
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
962
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1046
963
  """
1047
- Specifies a timeout for your step.
1048
-
1049
- This decorator is useful if this step may hang indefinitely.
1050
-
1051
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1052
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1053
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
964
+ Specifies the resources needed when executing this step.
1054
965
 
1055
- Note that all the values specified in parameters are added together so if you specify
1056
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
966
+ Use `@resources` to specify the resource requirements
967
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1057
968
 
1058
- Parameters
1059
- ----------
1060
- seconds : int, default 0
1061
- Number of seconds to wait prior to timing out.
1062
- minutes : int, default 0
1063
- Number of minutes to wait prior to timing out.
1064
- hours : int, default 0
1065
- Number of hours to wait prior to timing out.
1066
- """
1067
- ...
1068
-
1069
- @typing.overload
1070
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1071
- """
1072
- Specifies environment variables to be set prior to the execution of a step.
969
+ You can choose the compute layer on the command line by executing e.g.
970
+ ```
971
+ python myflow.py run --with batch
972
+ ```
973
+ or
974
+ ```
975
+ python myflow.py run --with kubernetes
976
+ ```
977
+ which executes the flow on the desired system using the
978
+ requirements specified in `@resources`.
1073
979
 
1074
980
  Parameters
1075
981
  ----------
1076
- vars : Dict[str, str], default {}
1077
- Dictionary of environment variables to set.
982
+ cpu : int, default 1
983
+ Number of CPUs required for this step.
984
+ gpu : int, default 0
985
+ Number of GPUs required for this step.
986
+ disk : int, optional, default None
987
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
988
+ memory : int, default 4096
989
+ Memory size (in MB) required for this step.
990
+ shared_memory : int, optional, default None
991
+ The value for the size (in MiB) of the /dev/shm volume for this step.
992
+ This parameter maps to the `--shm-size` option in Docker.
1078
993
  """
1079
994
  ...
1080
995
 
1081
996
  @typing.overload
1082
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1083
- ...
1084
-
1085
- @typing.overload
1086
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1087
- ...
1088
-
1089
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
997
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1090
998
  """
1091
- Specifies environment variables to be set prior to the execution of a step.
999
+ Specifies the Conda environment for the step.
1092
1000
 
1093
- Parameters
1094
- ----------
1095
- vars : Dict[str, str], default {}
1096
- Dictionary of environment variables to set.
1097
- """
1098
- ...
1099
-
1100
- @typing.overload
1101
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1102
- """
1103
- Specifies secrets to be retrieved and injected as environment variables prior to
1104
- the execution of a step.
1001
+ Information in this decorator will augment any
1002
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1003
+ you can use `@conda_base` to set packages required by all
1004
+ steps and use `@conda` to specify step-specific overrides.
1105
1005
 
1106
1006
  Parameters
1107
1007
  ----------
1108
- sources : List[Union[str, Dict[str, Any]]], default: []
1109
- List of secret specs, defining how the secrets are to be retrieved
1008
+ packages : Dict[str, str], default {}
1009
+ Packages to use for this step. The key is the name of the package
1010
+ and the value is the version to use.
1011
+ libraries : Dict[str, str], default {}
1012
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1013
+ python : str, optional, default None
1014
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1015
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1016
+ disabled : bool, default False
1017
+ If set to True, disables @conda.
1110
1018
  """
1111
1019
  ...
1112
1020
 
1113
1021
  @typing.overload
1114
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1022
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1115
1023
  ...
1116
1024
 
1117
1025
  @typing.overload
1118
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1026
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1119
1027
  ...
1120
1028
 
1121
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1029
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1122
1030
  """
1123
- Specifies secrets to be retrieved and injected as environment variables prior to
1124
- the execution of a step.
1031
+ Specifies the Conda environment for the step.
1032
+
1033
+ Information in this decorator will augment any
1034
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1035
+ you can use `@conda_base` to set packages required by all
1036
+ steps and use `@conda` to specify step-specific overrides.
1125
1037
 
1126
1038
  Parameters
1127
1039
  ----------
1128
- sources : List[Union[str, Dict[str, Any]]], default: []
1129
- List of secret specs, defining how the secrets are to be retrieved
1040
+ packages : Dict[str, str], default {}
1041
+ Packages to use for this step. The key is the name of the package
1042
+ and the value is the version to use.
1043
+ libraries : Dict[str, str], default {}
1044
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1045
+ python : str, optional, default None
1046
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1047
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1048
+ disabled : bool, default False
1049
+ If set to True, disables @conda.
1130
1050
  """
1131
1051
  ...
1132
1052
 
@@ -1184,208 +1104,330 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1184
1104
  ...
1185
1105
 
1186
1106
  @typing.overload
1187
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1107
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1188
1108
  """
1189
- Creates a human-readable report, a Metaflow Card, after this step completes.
1190
-
1191
- Note that you may add multiple `@card` decorators in a step with different parameters.
1109
+ Specifies secrets to be retrieved and injected as environment variables prior to
1110
+ the execution of a step.
1192
1111
 
1193
1112
  Parameters
1194
1113
  ----------
1195
- type : str, default 'default'
1196
- Card type.
1197
- id : str, optional, default None
1198
- If multiple cards are present, use this id to identify this card.
1199
- options : Dict[str, Any], default {}
1200
- Options passed to the card. The contents depend on the card type.
1201
- timeout : int, default 45
1202
- Interrupt reporting if it takes more than this many seconds.
1203
-
1204
-
1114
+ sources : List[Union[str, Dict[str, Any]]], default: []
1115
+ List of secret specs, defining how the secrets are to be retrieved
1205
1116
  """
1206
1117
  ...
1207
1118
 
1208
1119
  @typing.overload
1209
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1120
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1210
1121
  ...
1211
1122
 
1212
1123
  @typing.overload
1213
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1124
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1214
1125
  ...
1215
1126
 
1216
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1127
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1217
1128
  """
1218
- Creates a human-readable report, a Metaflow Card, after this step completes.
1219
-
1220
- Note that you may add multiple `@card` decorators in a step with different parameters.
1129
+ Specifies secrets to be retrieved and injected as environment variables prior to
1130
+ the execution of a step.
1221
1131
 
1222
1132
  Parameters
1223
1133
  ----------
1224
- type : str, default 'default'
1225
- Card type.
1226
- id : str, optional, default None
1227
- If multiple cards are present, use this id to identify this card.
1228
- options : Dict[str, Any], default {}
1229
- Options passed to the card. The contents depend on the card type.
1230
- timeout : int, default 45
1231
- Interrupt reporting if it takes more than this many seconds.
1232
-
1233
-
1134
+ sources : List[Union[str, Dict[str, Any]]], default: []
1135
+ List of secret specs, defining how the secrets are to be retrieved
1234
1136
  """
1235
1137
  ...
1236
1138
 
1237
1139
  @typing.overload
1238
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1140
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1239
1141
  """
1240
- Specifies that the step will success under all circumstances.
1142
+ Specifies a timeout for your step.
1241
1143
 
1242
- The decorator will create an optional artifact, specified by `var`, which
1243
- contains the exception raised. You can use it to detect the presence
1244
- of errors, indicating that all happy-path artifacts produced by the step
1245
- are missing.
1144
+ This decorator is useful if this step may hang indefinitely.
1145
+
1146
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1147
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1148
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1149
+
1150
+ Note that all the values specified in parameters are added together so if you specify
1151
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1246
1152
 
1247
1153
  Parameters
1248
1154
  ----------
1249
- var : str, optional, default None
1250
- Name of the artifact in which to store the caught exception.
1251
- If not specified, the exception is not stored.
1252
- print_exception : bool, default True
1253
- Determines whether or not the exception is printed to
1254
- stdout when caught.
1155
+ seconds : int, default 0
1156
+ Number of seconds to wait prior to timing out.
1157
+ minutes : int, default 0
1158
+ Number of minutes to wait prior to timing out.
1159
+ hours : int, default 0
1160
+ Number of hours to wait prior to timing out.
1255
1161
  """
1256
1162
  ...
1257
1163
 
1258
1164
  @typing.overload
1259
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1165
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1260
1166
  ...
1261
1167
 
1262
1168
  @typing.overload
1263
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1169
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1264
1170
  ...
1265
1171
 
1266
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1172
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1267
1173
  """
1268
- Specifies that the step will success under all circumstances.
1174
+ Specifies a timeout for your step.
1269
1175
 
1270
- The decorator will create an optional artifact, specified by `var`, which
1271
- contains the exception raised. You can use it to detect the presence
1272
- of errors, indicating that all happy-path artifacts produced by the step
1273
- are missing.
1176
+ This decorator is useful if this step may hang indefinitely.
1177
+
1178
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1179
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1180
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1181
+
1182
+ Note that all the values specified in parameters are added together so if you specify
1183
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1274
1184
 
1275
1185
  Parameters
1276
1186
  ----------
1277
- var : str, optional, default None
1278
- Name of the artifact in which to store the caught exception.
1279
- If not specified, the exception is not stored.
1280
- print_exception : bool, default True
1281
- Determines whether or not the exception is printed to
1282
- stdout when caught.
1187
+ seconds : int, default 0
1188
+ Number of seconds to wait prior to timing out.
1189
+ minutes : int, default 0
1190
+ Number of minutes to wait prior to timing out.
1191
+ hours : int, default 0
1192
+ Number of hours to wait prior to timing out.
1283
1193
  """
1284
1194
  ...
1285
1195
 
1286
1196
  @typing.overload
1287
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1197
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1288
1198
  """
1289
- Specifies the PyPI packages for the step.
1290
-
1291
- Information in this decorator will augment any
1292
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1293
- you can use `@pypi_base` to set packages required by all
1294
- steps and use `@pypi` to specify step-specific overrides.
1199
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1295
1200
 
1296
1201
  Parameters
1297
1202
  ----------
1298
- packages : Dict[str, str], default: {}
1299
- Packages to use for this step. The key is the name of the package
1300
- and the value is the version to use.
1301
- python : str, optional, default: None
1302
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1303
- that the version used will correspond to the version of the Python interpreter used to start the run.
1203
+ cpu : int, default 1
1204
+ Number of CPUs required for this step. If `@resources` is
1205
+ also present, the maximum value from all decorators is used.
1206
+ gpu : int, default 0
1207
+ Number of GPUs required for this step. If `@resources` is
1208
+ also present, the maximum value from all decorators is used.
1209
+ memory : int, default 4096
1210
+ Memory size (in MB) required for this step. If
1211
+ `@resources` is also present, the maximum value from all decorators is
1212
+ used.
1213
+ image : str, optional, default None
1214
+ Docker image to use when launching on AWS Batch. If not specified, and
1215
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1216
+ not, a default Docker image mapping to the current version of Python is used.
1217
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1218
+ AWS Batch Job Queue to submit the job to.
1219
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1220
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1221
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1222
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1223
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1224
+ shared_memory : int, optional, default None
1225
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1226
+ This parameter maps to the `--shm-size` option in Docker.
1227
+ max_swap : int, optional, default None
1228
+ The total amount of swap memory (in MiB) a container can use for this
1229
+ step. This parameter is translated to the `--memory-swap` option in
1230
+ Docker where the value is the sum of the container memory plus the
1231
+ `max_swap` value.
1232
+ swappiness : int, optional, default None
1233
+ This allows you to tune memory swappiness behavior for this step.
1234
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1235
+ necessary. A swappiness value of 100 causes pages to be swapped very
1236
+ aggressively. Accepted values are whole numbers between 0 and 100.
1237
+ use_tmpfs : bool, default False
1238
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1239
+ not available on Fargate compute environments
1240
+ tmpfs_tempdir : bool, default True
1241
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1242
+ tmpfs_size : int, optional, default None
1243
+ The value for the size (in MiB) of the tmpfs mount for this step.
1244
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1245
+ memory allocated for this step.
1246
+ tmpfs_path : str, optional, default None
1247
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1248
+ inferentia : int, default 0
1249
+ Number of Inferentia chips required for this step.
1250
+ trainium : int, default None
1251
+ Alias for inferentia. Use only one of the two.
1252
+ efa : int, default 0
1253
+ Number of elastic fabric adapter network devices to attach to container
1254
+ ephemeral_storage : int, default None
1255
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1256
+ This is only relevant for Fargate compute environments
1257
+ log_driver: str, optional, default None
1258
+ The log driver to use for the Amazon ECS container.
1259
+ log_options: List[str], optional, default None
1260
+ List of strings containing options for the chosen log driver. The configurable values
1261
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1262
+ Example: [`awslogs-group:aws/batch/job`]
1304
1263
  """
1305
1264
  ...
1306
1265
 
1307
1266
  @typing.overload
1308
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1267
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1309
1268
  ...
1310
1269
 
1311
1270
  @typing.overload
1312
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1313
1272
  ...
1314
1273
 
1315
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1274
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1316
1275
  """
1317
- Specifies the PyPI packages for the step.
1318
-
1319
- Information in this decorator will augment any
1320
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1321
- you can use `@pypi_base` to set packages required by all
1322
- steps and use `@pypi` to specify step-specific overrides.
1276
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1323
1277
 
1324
1278
  Parameters
1325
1279
  ----------
1326
- packages : Dict[str, str], default: {}
1327
- Packages to use for this step. The key is the name of the package
1328
- and the value is the version to use.
1329
- python : str, optional, default: None
1330
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1331
- that the version used will correspond to the version of the Python interpreter used to start the run.
1280
+ cpu : int, default 1
1281
+ Number of CPUs required for this step. If `@resources` is
1282
+ also present, the maximum value from all decorators is used.
1283
+ gpu : int, default 0
1284
+ Number of GPUs required for this step. If `@resources` is
1285
+ also present, the maximum value from all decorators is used.
1286
+ memory : int, default 4096
1287
+ Memory size (in MB) required for this step. If
1288
+ `@resources` is also present, the maximum value from all decorators is
1289
+ used.
1290
+ image : str, optional, default None
1291
+ Docker image to use when launching on AWS Batch. If not specified, and
1292
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1293
+ not, a default Docker image mapping to the current version of Python is used.
1294
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1295
+ AWS Batch Job Queue to submit the job to.
1296
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1297
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1298
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1299
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1300
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1301
+ shared_memory : int, optional, default None
1302
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1303
+ This parameter maps to the `--shm-size` option in Docker.
1304
+ max_swap : int, optional, default None
1305
+ The total amount of swap memory (in MiB) a container can use for this
1306
+ step. This parameter is translated to the `--memory-swap` option in
1307
+ Docker where the value is the sum of the container memory plus the
1308
+ `max_swap` value.
1309
+ swappiness : int, optional, default None
1310
+ This allows you to tune memory swappiness behavior for this step.
1311
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1312
+ necessary. A swappiness value of 100 causes pages to be swapped very
1313
+ aggressively. Accepted values are whole numbers between 0 and 100.
1314
+ use_tmpfs : bool, default False
1315
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1316
+ not available on Fargate compute environments
1317
+ tmpfs_tempdir : bool, default True
1318
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1319
+ tmpfs_size : int, optional, default None
1320
+ The value for the size (in MiB) of the tmpfs mount for this step.
1321
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1322
+ memory allocated for this step.
1323
+ tmpfs_path : str, optional, default None
1324
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1325
+ inferentia : int, default 0
1326
+ Number of Inferentia chips required for this step.
1327
+ trainium : int, default None
1328
+ Alias for inferentia. Use only one of the two.
1329
+ efa : int, default 0
1330
+ Number of elastic fabric adapter network devices to attach to container
1331
+ ephemeral_storage : int, default None
1332
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1333
+ This is only relevant for Fargate compute environments
1334
+ log_driver: str, optional, default None
1335
+ The log driver to use for the Amazon ECS container.
1336
+ log_options: List[str], optional, default None
1337
+ List of strings containing options for the chosen log driver. The configurable values
1338
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1339
+ Example: [`awslogs-group:aws/batch/job`]
1332
1340
  """
1333
1341
  ...
1334
1342
 
1335
1343
  @typing.overload
1336
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1344
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1337
1345
  """
1338
- Specifies the Conda environment for the step.
1346
+ Specifies that the step will success under all circumstances.
1339
1347
 
1340
- Information in this decorator will augment any
1341
- attributes set in the `@conda_base` flow-level decorator. Hence,
1342
- you can use `@conda_base` to set packages required by all
1343
- steps and use `@conda` to specify step-specific overrides.
1348
+ The decorator will create an optional artifact, specified by `var`, which
1349
+ contains the exception raised. You can use it to detect the presence
1350
+ of errors, indicating that all happy-path artifacts produced by the step
1351
+ are missing.
1344
1352
 
1345
1353
  Parameters
1346
1354
  ----------
1347
- packages : Dict[str, str], default {}
1348
- Packages to use for this step. The key is the name of the package
1349
- and the value is the version to use.
1350
- libraries : Dict[str, str], default {}
1351
- Supported for backward compatibility. When used with packages, packages will take precedence.
1352
- python : str, optional, default None
1353
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1354
- that the version used will correspond to the version of the Python interpreter used to start the run.
1355
- disabled : bool, default False
1356
- If set to True, disables @conda.
1355
+ var : str, optional, default None
1356
+ Name of the artifact in which to store the caught exception.
1357
+ If not specified, the exception is not stored.
1358
+ print_exception : bool, default True
1359
+ Determines whether or not the exception is printed to
1360
+ stdout when caught.
1357
1361
  """
1358
1362
  ...
1359
1363
 
1360
1364
  @typing.overload
1361
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1365
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1362
1366
  ...
1363
1367
 
1364
1368
  @typing.overload
1365
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1369
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1366
1370
  ...
1367
1371
 
1368
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1372
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1369
1373
  """
1370
- Specifies the Conda environment for the step.
1374
+ Specifies that the step will success under all circumstances.
1371
1375
 
1372
- Information in this decorator will augment any
1373
- attributes set in the `@conda_base` flow-level decorator. Hence,
1374
- you can use `@conda_base` to set packages required by all
1375
- steps and use `@conda` to specify step-specific overrides.
1376
+ The decorator will create an optional artifact, specified by `var`, which
1377
+ contains the exception raised. You can use it to detect the presence
1378
+ of errors, indicating that all happy-path artifacts produced by the step
1379
+ are missing.
1376
1380
 
1377
1381
  Parameters
1378
1382
  ----------
1379
- packages : Dict[str, str], default {}
1380
- Packages to use for this step. The key is the name of the package
1381
- and the value is the version to use.
1382
- libraries : Dict[str, str], default {}
1383
- Supported for backward compatibility. When used with packages, packages will take precedence.
1384
- python : str, optional, default None
1385
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1386
- that the version used will correspond to the version of the Python interpreter used to start the run.
1387
- disabled : bool, default False
1388
- If set to True, disables @conda.
1383
+ var : str, optional, default None
1384
+ Name of the artifact in which to store the caught exception.
1385
+ If not specified, the exception is not stored.
1386
+ print_exception : bool, default True
1387
+ Determines whether or not the exception is printed to
1388
+ stdout when caught.
1389
+ """
1390
+ ...
1391
+
1392
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1393
+ """
1394
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1395
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1396
+
1397
+ Parameters
1398
+ ----------
1399
+ timeout : int
1400
+ Time, in seconds before the task times out and fails. (Default: 3600)
1401
+ poke_interval : int
1402
+ Time in seconds that the job should wait in between each try. (Default: 60)
1403
+ mode : str
1404
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1405
+ exponential_backoff : bool
1406
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1407
+ pool : str
1408
+ the slot pool this task should run in,
1409
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1410
+ soft_fail : bool
1411
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1412
+ name : str
1413
+ Name of the sensor on Airflow
1414
+ description : str
1415
+ Description of sensor in the Airflow UI
1416
+ external_dag_id : str
1417
+ The dag_id that contains the task you want to wait for.
1418
+ external_task_ids : List[str]
1419
+ The list of task_ids that you want to wait for.
1420
+ If None (default value) the sensor waits for the DAG. (Default: None)
1421
+ allowed_states : List[str]
1422
+ Iterable of allowed states, (Default: ['success'])
1423
+ failed_states : List[str]
1424
+ Iterable of failed or dis-allowed states. (Default: None)
1425
+ execution_delta : datetime.timedelta
1426
+ time difference with the previous execution to look at,
1427
+ the default is the same logical date as the current task or DAG. (Default: None)
1428
+ check_existence: bool
1429
+ Set to True to check if the external task exists or check if
1430
+ the DAG to wait for exists. (Default: True)
1389
1431
  """
1390
1432
  ...
1391
1433
 
@@ -1484,105 +1526,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1484
1526
  """
1485
1527
  ...
1486
1528
 
1487
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1488
- """
1489
- Specifies what flows belong to the same project.
1490
-
1491
- A project-specific namespace is created for all flows that
1492
- use the same `@project(name)`.
1493
-
1494
- Parameters
1495
- ----------
1496
- name : str
1497
- Project name. Make sure that the name is unique amongst all
1498
- projects that use the same production scheduler. The name may
1499
- contain only lowercase alphanumeric characters and underscores.
1500
-
1501
-
1502
- """
1503
- ...
1504
-
1505
- @typing.overload
1506
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1507
- """
1508
- Specifies the PyPI packages for all steps of the flow.
1509
-
1510
- Use `@pypi_base` to set common packages required by all
1511
- steps and use `@pypi` to specify step-specific overrides.
1512
- Parameters
1513
- ----------
1514
- packages : Dict[str, str], default: {}
1515
- Packages to use for this flow. The key is the name of the package
1516
- and the value is the version to use.
1517
- python : str, optional, default: None
1518
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1519
- that the version used will correspond to the version of the Python interpreter used to start the run.
1520
- """
1521
- ...
1522
-
1523
- @typing.overload
1524
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1525
- ...
1526
-
1527
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1528
- """
1529
- Specifies the PyPI packages for all steps of the flow.
1530
-
1531
- Use `@pypi_base` to set common packages required by all
1532
- steps and use `@pypi` to specify step-specific overrides.
1533
- Parameters
1534
- ----------
1535
- packages : Dict[str, str], default: {}
1536
- Packages to use for this flow. The key is the name of the package
1537
- and the value is the version to use.
1538
- python : str, optional, default: None
1539
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1540
- that the version used will correspond to the version of the Python interpreter used to start the run.
1541
- """
1542
- ...
1543
-
1544
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1545
- """
1546
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1547
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1548
-
1549
- Parameters
1550
- ----------
1551
- timeout : int
1552
- Time, in seconds before the task times out and fails. (Default: 3600)
1553
- poke_interval : int
1554
- Time in seconds that the job should wait in between each try. (Default: 60)
1555
- mode : str
1556
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1557
- exponential_backoff : bool
1558
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1559
- pool : str
1560
- the slot pool this task should run in,
1561
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1562
- soft_fail : bool
1563
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1564
- name : str
1565
- Name of the sensor on Airflow
1566
- description : str
1567
- Description of sensor in the Airflow UI
1568
- external_dag_id : str
1569
- The dag_id that contains the task you want to wait for.
1570
- external_task_ids : List[str]
1571
- The list of task_ids that you want to wait for.
1572
- If None (default value) the sensor waits for the DAG. (Default: None)
1573
- allowed_states : List[str]
1574
- Iterable of allowed states, (Default: ['success'])
1575
- failed_states : List[str]
1576
- Iterable of failed or dis-allowed states. (Default: None)
1577
- execution_delta : datetime.timedelta
1578
- time difference with the previous execution to look at,
1579
- the default is the same logical date as the current task or DAG. (Default: None)
1580
- check_existence: bool
1581
- Set to True to check if the external task exists or check if
1582
- the DAG to wait for exists. (Default: True)
1583
- """
1584
- ...
1585
-
1586
1529
  @typing.overload
1587
1530
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1588
1531
  """
@@ -1686,55 +1629,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1686
1629
  """
1687
1630
  ...
1688
1631
 
1689
- @typing.overload
1690
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1691
- """
1692
- Specifies the times when the flow should be run when running on a
1693
- production scheduler.
1694
-
1695
- Parameters
1696
- ----------
1697
- hourly : bool, default False
1698
- Run the workflow hourly.
1699
- daily : bool, default True
1700
- Run the workflow daily.
1701
- weekly : bool, default False
1702
- Run the workflow weekly.
1703
- cron : str, optional, default None
1704
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1705
- specified by this expression.
1706
- timezone : str, optional, default None
1707
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1708
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1709
- """
1710
- ...
1711
-
1712
- @typing.overload
1713
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1714
- ...
1715
-
1716
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1717
- """
1718
- Specifies the times when the flow should be run when running on a
1719
- production scheduler.
1720
-
1721
- Parameters
1722
- ----------
1723
- hourly : bool, default False
1724
- Run the workflow hourly.
1725
- daily : bool, default True
1726
- Run the workflow daily.
1727
- weekly : bool, default False
1728
- Run the workflow weekly.
1729
- cron : str, optional, default None
1730
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1731
- specified by this expression.
1732
- timezone : str, optional, default None
1733
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1734
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1735
- """
1736
- ...
1737
-
1738
1632
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1739
1633
  """
1740
1634
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1777,6 +1671,63 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1777
1671
  """
1778
1672
  ...
1779
1673
 
1674
+ @typing.overload
1675
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1676
+ """
1677
+ Specifies the PyPI packages for all steps of the flow.
1678
+
1679
+ Use `@pypi_base` to set common packages required by all
1680
+ steps and use `@pypi` to specify step-specific overrides.
1681
+ Parameters
1682
+ ----------
1683
+ packages : Dict[str, str], default: {}
1684
+ Packages to use for this flow. The key is the name of the package
1685
+ and the value is the version to use.
1686
+ python : str, optional, default: None
1687
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1688
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1689
+ """
1690
+ ...
1691
+
1692
+ @typing.overload
1693
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1694
+ ...
1695
+
1696
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1697
+ """
1698
+ Specifies the PyPI packages for all steps of the flow.
1699
+
1700
+ Use `@pypi_base` to set common packages required by all
1701
+ steps and use `@pypi` to specify step-specific overrides.
1702
+ Parameters
1703
+ ----------
1704
+ packages : Dict[str, str], default: {}
1705
+ Packages to use for this flow. The key is the name of the package
1706
+ and the value is the version to use.
1707
+ python : str, optional, default: None
1708
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1709
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1710
+ """
1711
+ ...
1712
+
1713
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1714
+ """
1715
+ Specifies what flows belong to the same project.
1716
+
1717
+ A project-specific namespace is created for all flows that
1718
+ use the same `@project(name)`.
1719
+
1720
+ Parameters
1721
+ ----------
1722
+ name : str
1723
+ Project name. Make sure that the name is unique amongst all
1724
+ projects that use the same production scheduler. The name may
1725
+ contain only lowercase alphanumeric characters and underscores.
1726
+
1727
+
1728
+ """
1729
+ ...
1730
+
1780
1731
  @typing.overload
1781
1732
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1782
1733
  """
@@ -1826,6 +1777,55 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1826
1777
  """
1827
1778
  ...
1828
1779
 
1780
+ @typing.overload
1781
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1782
+ """
1783
+ Specifies the times when the flow should be run when running on a
1784
+ production scheduler.
1785
+
1786
+ Parameters
1787
+ ----------
1788
+ hourly : bool, default False
1789
+ Run the workflow hourly.
1790
+ daily : bool, default True
1791
+ Run the workflow daily.
1792
+ weekly : bool, default False
1793
+ Run the workflow weekly.
1794
+ cron : str, optional, default None
1795
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1796
+ specified by this expression.
1797
+ timezone : str, optional, default None
1798
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1799
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1800
+ """
1801
+ ...
1802
+
1803
+ @typing.overload
1804
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1805
+ ...
1806
+
1807
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1808
+ """
1809
+ Specifies the times when the flow should be run when running on a
1810
+ production scheduler.
1811
+
1812
+ Parameters
1813
+ ----------
1814
+ hourly : bool, default False
1815
+ Run the workflow hourly.
1816
+ daily : bool, default True
1817
+ Run the workflow daily.
1818
+ weekly : bool, default False
1819
+ Run the workflow weekly.
1820
+ cron : str, optional, default None
1821
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1822
+ specified by this expression.
1823
+ timezone : str, optional, default None
1824
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1825
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1826
+ """
1827
+ ...
1828
+
1829
1829
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1830
1830
  """
1831
1831
  Switch namespace to the one provided.