metaflow-stubs 2.11.11__py2.py3-none-any.whl → 2.11.12__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +603 -603
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  83. metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.12.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.12.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.11.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.12.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.11.dist-info → metaflow_stubs-2.11.12.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.11.11 #
4
- # Generated on 2024-05-02T22:04:13.800222 #
3
+ # MF version: 2.11.12 #
4
+ # Generated on 2024-05-03T20:23:27.231184 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
+ import metaflow.plugins.datatools.s3.s3
12
+ import io
11
13
  import metaflow.client.core
14
+ import metaflow.events
15
+ import metaflow.parameters
12
16
  import datetime
17
+ import metaflow.metaflow_current
13
18
  import metaflow._vendor.click.types
14
19
  import typing
15
- import metaflow.plugins.datatools.s3.s3
16
20
  import metaflow.datastore.inputs
17
- import metaflow.metaflow_current
18
- import metaflow.parameters
19
- import io
20
- import metaflow.events
21
21
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
22
22
  StepFlag = typing.NewType("StepFlag", bool)
23
23
 
@@ -775,226 +775,92 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
775
775
  ...
776
776
 
777
777
  @typing.overload
778
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
778
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
779
779
  """
780
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
780
+ Specifies secrets to be retrieved and injected as environment variables prior to
781
+ the execution of a step.
781
782
 
782
783
  Parameters
783
784
  ----------
784
- cpu : int, default 1
785
- Number of CPUs required for this step. If `@resources` is
786
- also present, the maximum value from all decorators is used.
787
- gpu : int, default 0
788
- Number of GPUs required for this step. If `@resources` is
789
- also present, the maximum value from all decorators is used.
790
- memory : int, default 4096
791
- Memory size (in MB) required for this step. If
792
- `@resources` is also present, the maximum value from all decorators is
793
- used.
794
- image : str, optional, default None
795
- Docker image to use when launching on AWS Batch. If not specified, and
796
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
797
- not, a default Docker image mapping to the current version of Python is used.
798
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
799
- AWS Batch Job Queue to submit the job to.
800
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
801
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
802
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
803
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
804
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
805
- shared_memory : int, optional, default None
806
- The value for the size (in MiB) of the /dev/shm volume for this step.
807
- This parameter maps to the `--shm-size` option in Docker.
808
- max_swap : int, optional, default None
809
- The total amount of swap memory (in MiB) a container can use for this
810
- step. This parameter is translated to the `--memory-swap` option in
811
- Docker where the value is the sum of the container memory plus the
812
- `max_swap` value.
813
- swappiness : int, optional, default None
814
- This allows you to tune memory swappiness behavior for this step.
815
- A swappiness value of 0 causes swapping not to happen unless absolutely
816
- necessary. A swappiness value of 100 causes pages to be swapped very
817
- aggressively. Accepted values are whole numbers between 0 and 100.
818
- use_tmpfs : bool, default False
819
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
820
- not available on Fargate compute environments
821
- tmpfs_tempdir : bool, default True
822
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
823
- tmpfs_size : int, optional, default None
824
- The value for the size (in MiB) of the tmpfs mount for this step.
825
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
826
- memory allocated for this step.
827
- tmpfs_path : str, optional, default None
828
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
829
- inferentia : int, default 0
830
- Number of Inferentia chips required for this step.
831
- trainium : int, default None
832
- Alias for inferentia. Use only one of the two.
833
- efa : int, default 0
834
- Number of elastic fabric adapter network devices to attach to container
835
- ephemeral_storage: int, default None
836
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
837
- This is only relevant for Fargate compute environments
838
- log_driver: str, optional, default None
839
- The log driver to use for the Amazon ECS container.
840
- log_options: List[str], optional, default None
841
- List of strings containing options for the chosen log driver. The configurable values
842
- depend on the `log driver` chosen. Validation of these options is not supported yet.
843
- Example usage: ["awslogs-group:aws/batch/job"]
785
+ sources : List[Union[str, Dict[str, Any]]], default: []
786
+ List of secret specs, defining how the secrets are to be retrieved
844
787
  """
845
788
  ...
846
789
 
847
790
  @typing.overload
848
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
849
792
  ...
850
793
 
851
794
  @typing.overload
852
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
795
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
853
796
  ...
854
797
 
855
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
798
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
856
799
  """
857
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
800
+ Specifies secrets to be retrieved and injected as environment variables prior to
801
+ the execution of a step.
858
802
 
859
803
  Parameters
860
804
  ----------
861
- cpu : int, default 1
862
- Number of CPUs required for this step. If `@resources` is
863
- also present, the maximum value from all decorators is used.
864
- gpu : int, default 0
865
- Number of GPUs required for this step. If `@resources` is
866
- also present, the maximum value from all decorators is used.
867
- memory : int, default 4096
868
- Memory size (in MB) required for this step. If
869
- `@resources` is also present, the maximum value from all decorators is
870
- used.
871
- image : str, optional, default None
872
- Docker image to use when launching on AWS Batch. If not specified, and
873
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
874
- not, a default Docker image mapping to the current version of Python is used.
875
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
876
- AWS Batch Job Queue to submit the job to.
877
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
878
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
879
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
880
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
881
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
882
- shared_memory : int, optional, default None
883
- The value for the size (in MiB) of the /dev/shm volume for this step.
884
- This parameter maps to the `--shm-size` option in Docker.
885
- max_swap : int, optional, default None
886
- The total amount of swap memory (in MiB) a container can use for this
887
- step. This parameter is translated to the `--memory-swap` option in
888
- Docker where the value is the sum of the container memory plus the
889
- `max_swap` value.
890
- swappiness : int, optional, default None
891
- This allows you to tune memory swappiness behavior for this step.
892
- A swappiness value of 0 causes swapping not to happen unless absolutely
893
- necessary. A swappiness value of 100 causes pages to be swapped very
894
- aggressively. Accepted values are whole numbers between 0 and 100.
895
- use_tmpfs : bool, default False
896
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
897
- not available on Fargate compute environments
898
- tmpfs_tempdir : bool, default True
899
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
900
- tmpfs_size : int, optional, default None
901
- The value for the size (in MiB) of the tmpfs mount for this step.
902
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
903
- memory allocated for this step.
904
- tmpfs_path : str, optional, default None
905
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
906
- inferentia : int, default 0
907
- Number of Inferentia chips required for this step.
908
- trainium : int, default None
909
- Alias for inferentia. Use only one of the two.
910
- efa : int, default 0
911
- Number of elastic fabric adapter network devices to attach to container
912
- ephemeral_storage: int, default None
913
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
914
- This is only relevant for Fargate compute environments
915
- log_driver: str, optional, default None
916
- The log driver to use for the Amazon ECS container.
917
- log_options: List[str], optional, default None
918
- List of strings containing options for the chosen log driver. The configurable values
919
- depend on the `log driver` chosen. Validation of these options is not supported yet.
920
- Example usage: ["awslogs-group:aws/batch/job"]
805
+ sources : List[Union[str, Dict[str, Any]]], default: []
806
+ List of secret specs, defining how the secrets are to be retrieved
921
807
  """
922
808
  ...
923
809
 
924
810
  @typing.overload
925
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
811
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
926
812
  """
927
- Specifies the resources needed when executing this step.
928
-
929
- Use `@resources` to specify the resource requirements
930
- independently of the specific compute layer (`@batch`, `@kubernetes`).
813
+ Specifies the Conda environment for the step.
931
814
 
932
- You can choose the compute layer on the command line by executing e.g.
933
- ```
934
- python myflow.py run --with batch
935
- ```
936
- or
937
- ```
938
- python myflow.py run --with kubernetes
939
- ```
940
- which executes the flow on the desired system using the
941
- requirements specified in `@resources`.
815
+ Information in this decorator will augment any
816
+ attributes set in the `@conda_base` flow-level decorator. Hence,
817
+ you can use `@conda_base` to set packages required by all
818
+ steps and use `@conda` to specify step-specific overrides.
942
819
 
943
820
  Parameters
944
821
  ----------
945
- cpu : int, default 1
946
- Number of CPUs required for this step.
947
- gpu : int, default 0
948
- Number of GPUs required for this step.
949
- disk : int, optional, default None
950
- Disk size (in MB) required for this step. Only applies on Kubernetes.
951
- memory : int, default 4096
952
- Memory size (in MB) required for this step.
953
- shared_memory : int, optional, default None
954
- The value for the size (in MiB) of the /dev/shm volume for this step.
955
- This parameter maps to the `--shm-size` option in Docker.
822
+ packages : Dict[str, str], default {}
823
+ Packages to use for this step. The key is the name of the package
824
+ and the value is the version to use.
825
+ libraries : Dict[str, str], default {}
826
+ Supported for backward compatibility. When used with packages, packages will take precedence.
827
+ python : str, optional, default None
828
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
829
+ that the version used will correspond to the version of the Python interpreter used to start the run.
830
+ disabled : bool, default False
831
+ If set to True, disables @conda.
956
832
  """
957
833
  ...
958
834
 
959
835
  @typing.overload
960
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
836
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
961
837
  ...
962
838
 
963
839
  @typing.overload
964
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
840
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
965
841
  ...
966
842
 
967
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
843
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
968
844
  """
969
- Specifies the resources needed when executing this step.
970
-
971
- Use `@resources` to specify the resource requirements
972
- independently of the specific compute layer (`@batch`, `@kubernetes`).
845
+ Specifies the Conda environment for the step.
973
846
 
974
- You can choose the compute layer on the command line by executing e.g.
975
- ```
976
- python myflow.py run --with batch
977
- ```
978
- or
979
- ```
980
- python myflow.py run --with kubernetes
981
- ```
982
- which executes the flow on the desired system using the
983
- requirements specified in `@resources`.
847
+ Information in this decorator will augment any
848
+ attributes set in the `@conda_base` flow-level decorator. Hence,
849
+ you can use `@conda_base` to set packages required by all
850
+ steps and use `@conda` to specify step-specific overrides.
984
851
 
985
852
  Parameters
986
853
  ----------
987
- cpu : int, default 1
988
- Number of CPUs required for this step.
989
- gpu : int, default 0
990
- Number of GPUs required for this step.
991
- disk : int, optional, default None
992
- Disk size (in MB) required for this step. Only applies on Kubernetes.
993
- memory : int, default 4096
994
- Memory size (in MB) required for this step.
995
- shared_memory : int, optional, default None
996
- The value for the size (in MiB) of the /dev/shm volume for this step.
997
- This parameter maps to the `--shm-size` option in Docker.
854
+ packages : Dict[str, str], default {}
855
+ Packages to use for this step. The key is the name of the package
856
+ and the value is the version to use.
857
+ libraries : Dict[str, str], default {}
858
+ Supported for backward compatibility. When used with packages, packages will take precedence.
859
+ python : str, optional, default None
860
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
861
+ that the version used will correspond to the version of the Python interpreter used to start the run.
862
+ disabled : bool, default False
863
+ If set to True, disables @conda.
998
864
  """
999
865
  ...
1000
866
 
@@ -1107,25 +973,161 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1107
973
  ...
1108
974
 
1109
975
  @typing.overload
1110
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
976
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1111
977
  """
1112
- Specifies the number of times the task corresponding
1113
- to a step needs to be retried.
1114
-
1115
- This decorator is useful for handling transient errors, such as networking issues.
1116
- If your task contains operations that can't be retried safely, e.g. database updates,
1117
- it is advisable to annotate it with `@retry(times=0)`.
978
+ Specifies the resources needed when executing this step.
1118
979
 
1119
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1120
- decorator will execute a no-op task after all retries have been exhausted,
1121
- ensuring that the flow execution can continue.
980
+ Use `@resources` to specify the resource requirements
981
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1122
982
 
1123
- Parameters
1124
- ----------
1125
- times : int, default 3
1126
- Number of times to retry this task.
1127
- minutes_between_retries : int, default 2
1128
- Number of minutes between retries.
983
+ You can choose the compute layer on the command line by executing e.g.
984
+ ```
985
+ python myflow.py run --with batch
986
+ ```
987
+ or
988
+ ```
989
+ python myflow.py run --with kubernetes
990
+ ```
991
+ which executes the flow on the desired system using the
992
+ requirements specified in `@resources`.
993
+
994
+ Parameters
995
+ ----------
996
+ cpu : int, default 1
997
+ Number of CPUs required for this step.
998
+ gpu : int, default 0
999
+ Number of GPUs required for this step.
1000
+ disk : int, optional, default None
1001
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1002
+ memory : int, default 4096
1003
+ Memory size (in MB) required for this step.
1004
+ shared_memory : int, optional, default None
1005
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1006
+ This parameter maps to the `--shm-size` option in Docker.
1007
+ """
1008
+ ...
1009
+
1010
+ @typing.overload
1011
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1012
+ ...
1013
+
1014
+ @typing.overload
1015
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1016
+ ...
1017
+
1018
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1019
+ """
1020
+ Specifies the resources needed when executing this step.
1021
+
1022
+ Use `@resources` to specify the resource requirements
1023
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1024
+
1025
+ You can choose the compute layer on the command line by executing e.g.
1026
+ ```
1027
+ python myflow.py run --with batch
1028
+ ```
1029
+ or
1030
+ ```
1031
+ python myflow.py run --with kubernetes
1032
+ ```
1033
+ which executes the flow on the desired system using the
1034
+ requirements specified in `@resources`.
1035
+
1036
+ Parameters
1037
+ ----------
1038
+ cpu : int, default 1
1039
+ Number of CPUs required for this step.
1040
+ gpu : int, default 0
1041
+ Number of GPUs required for this step.
1042
+ disk : int, optional, default None
1043
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1044
+ memory : int, default 4096
1045
+ Memory size (in MB) required for this step.
1046
+ shared_memory : int, optional, default None
1047
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1048
+ This parameter maps to the `--shm-size` option in Docker.
1049
+ """
1050
+ ...
1051
+
1052
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1053
+ """
1054
+ Specifies that this step should execute on Kubernetes.
1055
+
1056
+ Parameters
1057
+ ----------
1058
+ cpu : int, default 1
1059
+ Number of CPUs required for this step. If `@resources` is
1060
+ also present, the maximum value from all decorators is used.
1061
+ memory : int, default 4096
1062
+ Memory size (in MB) required for this step. If
1063
+ `@resources` is also present, the maximum value from all decorators is
1064
+ used.
1065
+ disk : int, default 10240
1066
+ Disk size (in MB) required for this step. If
1067
+ `@resources` is also present, the maximum value from all decorators is
1068
+ used.
1069
+ image : str, optional, default None
1070
+ Docker image to use when launching on Kubernetes. If not specified, and
1071
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1072
+ not, a default Docker image mapping to the current version of Python is used.
1073
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1074
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1075
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1076
+ Kubernetes service account to use when launching pod in Kubernetes.
1077
+ secrets : List[str], optional, default None
1078
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1079
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1080
+ in Metaflow configuration.
1081
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1082
+ Kubernetes namespace to use when launching pod in Kubernetes.
1083
+ gpu : int, optional, default None
1084
+ Number of GPUs required for this step. A value of zero implies that
1085
+ the scheduled node should not have GPUs.
1086
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1087
+ The vendor of the GPUs to be used for this step.
1088
+ tolerations : List[str], default []
1089
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1090
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1091
+ use_tmpfs : bool, default False
1092
+ This enables an explicit tmpfs mount for this step.
1093
+ tmpfs_tempdir : bool, default True
1094
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1095
+ tmpfs_size : int, optional, default: None
1096
+ The value for the size (in MiB) of the tmpfs mount for this step.
1097
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1098
+ memory allocated for this step.
1099
+ tmpfs_path : str, optional, default /metaflow_temp
1100
+ Path to tmpfs mount for this step.
1101
+ persistent_volume_claims : Dict[str, str], optional, default None
1102
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1103
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1104
+ shared_memory: int, optional
1105
+ Shared memory size (in MiB) required for this step
1106
+ port: int, optional
1107
+ Port number to specify in the Kubernetes job object
1108
+ """
1109
+ ...
1110
+
1111
+ @typing.overload
1112
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1113
+ """
1114
+ Specifies the number of times the task corresponding
1115
+ to a step needs to be retried.
1116
+
1117
+ This decorator is useful for handling transient errors, such as networking issues.
1118
+ If your task contains operations that can't be retried safely, e.g. database updates,
1119
+ it is advisable to annotate it with `@retry(times=0)`.
1120
+
1121
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1122
+ decorator will execute a no-op task after all retries have been exhausted,
1123
+ ensuring that the flow execution can continue.
1124
+
1125
+ Parameters
1126
+ ----------
1127
+ times : int, default 3
1128
+ Number of times to retry this task.
1129
+ minutes_between_retries : int, default 2
1130
+ Number of minutes between retries.
1129
1131
  """
1130
1132
  ...
1131
1133
 
@@ -1160,270 +1162,427 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1160
1162
  ...
1161
1163
 
1162
1164
  @typing.overload
1163
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1165
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1164
1166
  """
1165
- Specifies that the step will success under all circumstances.
1166
-
1167
- The decorator will create an optional artifact, specified by `var`, which
1168
- contains the exception raised. You can use it to detect the presence
1169
- of errors, indicating that all happy-path artifacts produced by the step
1170
- are missing.
1167
+ Specifies environment variables to be set prior to the execution of a step.
1171
1168
 
1172
1169
  Parameters
1173
1170
  ----------
1174
- var : str, optional, default None
1175
- Name of the artifact in which to store the caught exception.
1176
- If not specified, the exception is not stored.
1177
- print_exception : bool, default True
1178
- Determines whether or not the exception is printed to
1179
- stdout when caught.
1171
+ vars : Dict[str, str], default {}
1172
+ Dictionary of environment variables to set.
1180
1173
  """
1181
1174
  ...
1182
1175
 
1183
1176
  @typing.overload
1184
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1177
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1185
1178
  ...
1186
1179
 
1187
1180
  @typing.overload
1188
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1181
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1189
1182
  ...
1190
1183
 
1191
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1184
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1192
1185
  """
1193
- Specifies that the step will success under all circumstances.
1194
-
1195
- The decorator will create an optional artifact, specified by `var`, which
1196
- contains the exception raised. You can use it to detect the presence
1197
- of errors, indicating that all happy-path artifacts produced by the step
1198
- are missing.
1186
+ Specifies environment variables to be set prior to the execution of a step.
1199
1187
 
1200
1188
  Parameters
1201
1189
  ----------
1202
- var : str, optional, default None
1203
- Name of the artifact in which to store the caught exception.
1204
- If not specified, the exception is not stored.
1205
- print_exception : bool, default True
1206
- Determines whether or not the exception is printed to
1207
- stdout when caught.
1190
+ vars : Dict[str, str], default {}
1191
+ Dictionary of environment variables to set.
1208
1192
  """
1209
1193
  ...
1210
1194
 
1211
1195
  @typing.overload
1212
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1196
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1213
1197
  """
1214
- Specifies the Conda environment for the step.
1215
-
1216
- Information in this decorator will augment any
1217
- attributes set in the `@conda_base` flow-level decorator. Hence,
1218
- you can use `@conda_base` to set packages required by all
1219
- steps and use `@conda` to specify step-specific overrides.
1198
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1220
1199
 
1221
1200
  Parameters
1222
1201
  ----------
1223
- packages : Dict[str, str], default {}
1224
- Packages to use for this step. The key is the name of the package
1225
- and the value is the version to use.
1226
- libraries : Dict[str, str], default {}
1227
- Supported for backward compatibility. When used with packages, packages will take precedence.
1228
- python : str, optional, default None
1229
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1230
- that the version used will correspond to the version of the Python interpreter used to start the run.
1231
- disabled : bool, default False
1232
- If set to True, disables @conda.
1202
+ cpu : int, default 1
1203
+ Number of CPUs required for this step. If `@resources` is
1204
+ also present, the maximum value from all decorators is used.
1205
+ gpu : int, default 0
1206
+ Number of GPUs required for this step. If `@resources` is
1207
+ also present, the maximum value from all decorators is used.
1208
+ memory : int, default 4096
1209
+ Memory size (in MB) required for this step. If
1210
+ `@resources` is also present, the maximum value from all decorators is
1211
+ used.
1212
+ image : str, optional, default None
1213
+ Docker image to use when launching on AWS Batch. If not specified, and
1214
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1215
+ not, a default Docker image mapping to the current version of Python is used.
1216
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1217
+ AWS Batch Job Queue to submit the job to.
1218
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1219
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1220
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1221
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1222
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1223
+ shared_memory : int, optional, default None
1224
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1225
+ This parameter maps to the `--shm-size` option in Docker.
1226
+ max_swap : int, optional, default None
1227
+ The total amount of swap memory (in MiB) a container can use for this
1228
+ step. This parameter is translated to the `--memory-swap` option in
1229
+ Docker where the value is the sum of the container memory plus the
1230
+ `max_swap` value.
1231
+ swappiness : int, optional, default None
1232
+ This allows you to tune memory swappiness behavior for this step.
1233
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1234
+ necessary. A swappiness value of 100 causes pages to be swapped very
1235
+ aggressively. Accepted values are whole numbers between 0 and 100.
1236
+ use_tmpfs : bool, default False
1237
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1238
+ not available on Fargate compute environments
1239
+ tmpfs_tempdir : bool, default True
1240
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1241
+ tmpfs_size : int, optional, default None
1242
+ The value for the size (in MiB) of the tmpfs mount for this step.
1243
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1244
+ memory allocated for this step.
1245
+ tmpfs_path : str, optional, default None
1246
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1247
+ inferentia : int, default 0
1248
+ Number of Inferentia chips required for this step.
1249
+ trainium : int, default None
1250
+ Alias for inferentia. Use only one of the two.
1251
+ efa : int, default 0
1252
+ Number of elastic fabric adapter network devices to attach to container
1253
+ ephemeral_storage: int, default None
1254
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1255
+ This is only relevant for Fargate compute environments
1256
+ log_driver: str, optional, default None
1257
+ The log driver to use for the Amazon ECS container.
1258
+ log_options: List[str], optional, default None
1259
+ List of strings containing options for the chosen log driver. The configurable values
1260
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1261
+ Example usage: ["awslogs-group:aws/batch/job"]
1233
1262
  """
1234
1263
  ...
1235
1264
 
1236
1265
  @typing.overload
1237
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1266
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1238
1267
  ...
1239
1268
 
1240
1269
  @typing.overload
1241
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1270
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1242
1271
  ...
1243
1272
 
1244
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1273
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1245
1274
  """
1246
- Specifies the Conda environment for the step.
1247
-
1248
- Information in this decorator will augment any
1249
- attributes set in the `@conda_base` flow-level decorator. Hence,
1250
- you can use `@conda_base` to set packages required by all
1251
- steps and use `@conda` to specify step-specific overrides.
1275
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1252
1276
 
1253
1277
  Parameters
1254
1278
  ----------
1255
- packages : Dict[str, str], default {}
1256
- Packages to use for this step. The key is the name of the package
1257
- and the value is the version to use.
1258
- libraries : Dict[str, str], default {}
1259
- Supported for backward compatibility. When used with packages, packages will take precedence.
1260
- python : str, optional, default None
1261
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1262
- that the version used will correspond to the version of the Python interpreter used to start the run.
1263
- disabled : bool, default False
1264
- If set to True, disables @conda.
1279
+ cpu : int, default 1
1280
+ Number of CPUs required for this step. If `@resources` is
1281
+ also present, the maximum value from all decorators is used.
1282
+ gpu : int, default 0
1283
+ Number of GPUs required for this step. If `@resources` is
1284
+ also present, the maximum value from all decorators is used.
1285
+ memory : int, default 4096
1286
+ Memory size (in MB) required for this step. If
1287
+ `@resources` is also present, the maximum value from all decorators is
1288
+ used.
1289
+ image : str, optional, default None
1290
+ Docker image to use when launching on AWS Batch. If not specified, and
1291
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1292
+ not, a default Docker image mapping to the current version of Python is used.
1293
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1294
+ AWS Batch Job Queue to submit the job to.
1295
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1296
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1297
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1298
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1299
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1300
+ shared_memory : int, optional, default None
1301
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1302
+ This parameter maps to the `--shm-size` option in Docker.
1303
+ max_swap : int, optional, default None
1304
+ The total amount of swap memory (in MiB) a container can use for this
1305
+ step. This parameter is translated to the `--memory-swap` option in
1306
+ Docker where the value is the sum of the container memory plus the
1307
+ `max_swap` value.
1308
+ swappiness : int, optional, default None
1309
+ This allows you to tune memory swappiness behavior for this step.
1310
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1311
+ necessary. A swappiness value of 100 causes pages to be swapped very
1312
+ aggressively. Accepted values are whole numbers between 0 and 100.
1313
+ use_tmpfs : bool, default False
1314
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1315
+ not available on Fargate compute environments
1316
+ tmpfs_tempdir : bool, default True
1317
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1318
+ tmpfs_size : int, optional, default None
1319
+ The value for the size (in MiB) of the tmpfs mount for this step.
1320
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1321
+ memory allocated for this step.
1322
+ tmpfs_path : str, optional, default None
1323
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1324
+ inferentia : int, default 0
1325
+ Number of Inferentia chips required for this step.
1326
+ trainium : int, default None
1327
+ Alias for inferentia. Use only one of the two.
1328
+ efa : int, default 0
1329
+ Number of elastic fabric adapter network devices to attach to container
1330
+ ephemeral_storage: int, default None
1331
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1332
+ This is only relevant for Fargate compute environments
1333
+ log_driver: str, optional, default None
1334
+ The log driver to use for the Amazon ECS container.
1335
+ log_options: List[str], optional, default None
1336
+ List of strings containing options for the chosen log driver. The configurable values
1337
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1338
+ Example usage: ["awslogs-group:aws/batch/job"]
1265
1339
  """
1266
1340
  ...
1267
1341
 
1268
1342
  @typing.overload
1269
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1343
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1270
1344
  """
1271
- Specifies secrets to be retrieved and injected as environment variables prior to
1272
- the execution of a step.
1345
+ Specifies that the step will success under all circumstances.
1346
+
1347
+ The decorator will create an optional artifact, specified by `var`, which
1348
+ contains the exception raised. You can use it to detect the presence
1349
+ of errors, indicating that all happy-path artifacts produced by the step
1350
+ are missing.
1273
1351
 
1274
1352
  Parameters
1275
1353
  ----------
1276
- sources : List[Union[str, Dict[str, Any]]], default: []
1277
- List of secret specs, defining how the secrets are to be retrieved
1354
+ var : str, optional, default None
1355
+ Name of the artifact in which to store the caught exception.
1356
+ If not specified, the exception is not stored.
1357
+ print_exception : bool, default True
1358
+ Determines whether or not the exception is printed to
1359
+ stdout when caught.
1278
1360
  """
1279
1361
  ...
1280
1362
 
1281
1363
  @typing.overload
1282
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1364
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1283
1365
  ...
1284
1366
 
1285
1367
  @typing.overload
1286
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1368
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1287
1369
  ...
1288
1370
 
1289
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1371
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1290
1372
  """
1291
- Specifies secrets to be retrieved and injected as environment variables prior to
1292
- the execution of a step.
1373
+ Specifies that the step will success under all circumstances.
1374
+
1375
+ The decorator will create an optional artifact, specified by `var`, which
1376
+ contains the exception raised. You can use it to detect the presence
1377
+ of errors, indicating that all happy-path artifacts produced by the step
1378
+ are missing.
1293
1379
 
1294
1380
  Parameters
1295
1381
  ----------
1296
- sources : List[Union[str, Dict[str, Any]]], default: []
1297
- List of secret specs, defining how the secrets are to be retrieved
1382
+ var : str, optional, default None
1383
+ Name of the artifact in which to store the caught exception.
1384
+ If not specified, the exception is not stored.
1385
+ print_exception : bool, default True
1386
+ Determines whether or not the exception is printed to
1387
+ stdout when caught.
1298
1388
  """
1299
1389
  ...
1300
1390
 
1301
1391
  @typing.overload
1302
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1392
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1303
1393
  """
1304
- Specifies environment variables to be set prior to the execution of a step.
1394
+ Specifies the flow(s) that this flow depends on.
1395
+
1396
+ ```
1397
+ @trigger_on_finish(flow='FooFlow')
1398
+ ```
1399
+ or
1400
+ ```
1401
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1402
+ ```
1403
+ This decorator respects the @project decorator and triggers the flow
1404
+ when upstream runs within the same namespace complete successfully
1405
+
1406
+ Additionally, you can specify project aware upstream flow dependencies
1407
+ by specifying the fully qualified project_flow_name.
1408
+ ```
1409
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1410
+ ```
1411
+ or
1412
+ ```
1413
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1414
+ ```
1415
+
1416
+ You can also specify just the project or project branch (other values will be
1417
+ inferred from the current project or project branch):
1418
+ ```
1419
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1420
+ ```
1421
+
1422
+ Note that `branch` is typically one of:
1423
+ - `prod`
1424
+ - `user.bob`
1425
+ - `test.my_experiment`
1426
+ - `prod.staging`
1305
1427
 
1306
1428
  Parameters
1307
1429
  ----------
1308
- vars : Dict[str, str], default {}
1309
- Dictionary of environment variables to set.
1430
+ flow : Union[str, Dict[str, str]], optional, default None
1431
+ Upstream flow dependency for this flow.
1432
+ flows : List[Union[str, Dict[str, str]]], default []
1433
+ Upstream flow dependencies for this flow.
1434
+ options : Dict[str, Any], default {}
1435
+ Backend-specific configuration for tuning eventing behavior.
1436
+
1437
+
1310
1438
  """
1311
1439
  ...
1312
1440
 
1313
1441
  @typing.overload
1314
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1315
- ...
1316
-
1317
- @typing.overload
1318
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1442
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1319
1443
  ...
1320
1444
 
1321
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1445
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1322
1446
  """
1323
- Specifies environment variables to be set prior to the execution of a step.
1447
+ Specifies the flow(s) that this flow depends on.
1324
1448
 
1325
- Parameters
1326
- ----------
1327
- vars : Dict[str, str], default {}
1328
- Dictionary of environment variables to set.
1329
- """
1330
- ...
1331
-
1332
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1333
- """
1334
- Specifies that this step should execute on Kubernetes.
1449
+ ```
1450
+ @trigger_on_finish(flow='FooFlow')
1451
+ ```
1452
+ or
1453
+ ```
1454
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1455
+ ```
1456
+ This decorator respects the @project decorator and triggers the flow
1457
+ when upstream runs within the same namespace complete successfully
1458
+
1459
+ Additionally, you can specify project aware upstream flow dependencies
1460
+ by specifying the fully qualified project_flow_name.
1461
+ ```
1462
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1463
+ ```
1464
+ or
1465
+ ```
1466
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1467
+ ```
1468
+
1469
+ You can also specify just the project or project branch (other values will be
1470
+ inferred from the current project or project branch):
1471
+ ```
1472
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1473
+ ```
1474
+
1475
+ Note that `branch` is typically one of:
1476
+ - `prod`
1477
+ - `user.bob`
1478
+ - `test.my_experiment`
1479
+ - `prod.staging`
1335
1480
 
1336
1481
  Parameters
1337
1482
  ----------
1338
- cpu : int, default 1
1339
- Number of CPUs required for this step. If `@resources` is
1340
- also present, the maximum value from all decorators is used.
1341
- memory : int, default 4096
1342
- Memory size (in MB) required for this step. If
1343
- `@resources` is also present, the maximum value from all decorators is
1344
- used.
1345
- disk : int, default 10240
1346
- Disk size (in MB) required for this step. If
1347
- `@resources` is also present, the maximum value from all decorators is
1348
- used.
1349
- image : str, optional, default None
1350
- Docker image to use when launching on Kubernetes. If not specified, and
1351
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1352
- not, a default Docker image mapping to the current version of Python is used.
1353
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1354
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1355
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1356
- Kubernetes service account to use when launching pod in Kubernetes.
1357
- secrets : List[str], optional, default None
1358
- Kubernetes secrets to use when launching pod in Kubernetes. These
1359
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1360
- in Metaflow configuration.
1361
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1362
- Kubernetes namespace to use when launching pod in Kubernetes.
1363
- gpu : int, optional, default None
1364
- Number of GPUs required for this step. A value of zero implies that
1365
- the scheduled node should not have GPUs.
1366
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1367
- The vendor of the GPUs to be used for this step.
1368
- tolerations : List[str], default []
1369
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1370
- Kubernetes tolerations to use when launching pod in Kubernetes.
1371
- use_tmpfs : bool, default False
1372
- This enables an explicit tmpfs mount for this step.
1373
- tmpfs_tempdir : bool, default True
1374
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1375
- tmpfs_size : int, optional, default: None
1376
- The value for the size (in MiB) of the tmpfs mount for this step.
1377
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1378
- memory allocated for this step.
1379
- tmpfs_path : str, optional, default /metaflow_temp
1380
- Path to tmpfs mount for this step.
1381
- persistent_volume_claims : Dict[str, str], optional, default None
1382
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1383
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1384
- shared_memory: int, optional
1385
- Shared memory size (in MiB) required for this step
1386
- port: int, optional
1387
- Port number to specify in the Kubernetes job object
1483
+ flow : Union[str, Dict[str, str]], optional, default None
1484
+ Upstream flow dependency for this flow.
1485
+ flows : List[Union[str, Dict[str, str]]], default []
1486
+ Upstream flow dependencies for this flow.
1487
+ options : Dict[str, Any], default {}
1488
+ Backend-specific configuration for tuning eventing behavior.
1489
+
1490
+
1388
1491
  """
1389
1492
  ...
1390
1493
 
1391
1494
  @typing.overload
1392
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1495
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1393
1496
  """
1394
- Specifies the PyPI packages for all steps of the flow.
1497
+ Specifies the event(s) that this flow depends on.
1498
+
1499
+ ```
1500
+ @trigger(event='foo')
1501
+ ```
1502
+ or
1503
+ ```
1504
+ @trigger(events=['foo', 'bar'])
1505
+ ```
1506
+
1507
+ Additionally, you can specify the parameter mappings
1508
+ to map event payload to Metaflow parameters for the flow.
1509
+ ```
1510
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1511
+ ```
1512
+ or
1513
+ ```
1514
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1515
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1516
+ ```
1517
+
1518
+ 'parameters' can also be a list of strings and tuples like so:
1519
+ ```
1520
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1521
+ ```
1522
+ This is equivalent to:
1523
+ ```
1524
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1525
+ ```
1395
1526
 
1396
- Use `@pypi_base` to set common packages required by all
1397
- steps and use `@pypi` to specify step-specific overrides.
1398
1527
  Parameters
1399
1528
  ----------
1400
- packages : Dict[str, str], default: {}
1401
- Packages to use for this flow. The key is the name of the package
1402
- and the value is the version to use.
1403
- python : str, optional, default: None
1404
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1405
- that the version used will correspond to the version of the Python interpreter used to start the run.
1529
+ event : Union[str, Dict[str, Any]], optional, default None
1530
+ Event dependency for this flow.
1531
+ events : List[Union[str, Dict[str, Any]]], default []
1532
+ Events dependency for this flow.
1533
+ options : Dict[str, Any], default {}
1534
+ Backend-specific configuration for tuning eventing behavior.
1535
+
1536
+
1406
1537
  """
1407
1538
  ...
1408
1539
 
1409
1540
  @typing.overload
1410
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1541
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1411
1542
  ...
1412
1543
 
1413
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1544
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1414
1545
  """
1415
- Specifies the PyPI packages for all steps of the flow.
1546
+ Specifies the event(s) that this flow depends on.
1547
+
1548
+ ```
1549
+ @trigger(event='foo')
1550
+ ```
1551
+ or
1552
+ ```
1553
+ @trigger(events=['foo', 'bar'])
1554
+ ```
1555
+
1556
+ Additionally, you can specify the parameter mappings
1557
+ to map event payload to Metaflow parameters for the flow.
1558
+ ```
1559
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1560
+ ```
1561
+ or
1562
+ ```
1563
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1564
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1565
+ ```
1566
+
1567
+ 'parameters' can also be a list of strings and tuples like so:
1568
+ ```
1569
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1570
+ ```
1571
+ This is equivalent to:
1572
+ ```
1573
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1574
+ ```
1416
1575
 
1417
- Use `@pypi_base` to set common packages required by all
1418
- steps and use `@pypi` to specify step-specific overrides.
1419
1576
  Parameters
1420
1577
  ----------
1421
- packages : Dict[str, str], default: {}
1422
- Packages to use for this flow. The key is the name of the package
1423
- and the value is the version to use.
1424
- python : str, optional, default: None
1425
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1426
- that the version used will correspond to the version of the Python interpreter used to start the run.
1578
+ event : Union[str, Dict[str, Any]], optional, default None
1579
+ Event dependency for this flow.
1580
+ events : List[Union[str, Dict[str, Any]]], default []
1581
+ Events dependency for this flow.
1582
+ options : Dict[str, Any], default {}
1583
+ Backend-specific configuration for tuning eventing behavior.
1584
+
1585
+
1427
1586
  """
1428
1587
  ...
1429
1588
 
@@ -1488,146 +1647,51 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
1488
1647
  ...
1489
1648
 
1490
1649
  @typing.overload
1491
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1492
- """
1493
- Specifies the times when the flow should be run when running on a
1494
- production scheduler.
1495
-
1496
- Parameters
1497
- ----------
1498
- hourly : bool, default False
1499
- Run the workflow hourly.
1500
- daily : bool, default True
1501
- Run the workflow daily.
1502
- weekly : bool, default False
1503
- Run the workflow weekly.
1504
- cron : str, optional, default None
1505
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1506
- specified by this expression.
1507
- timezone : str, optional, default None
1508
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1509
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1510
- """
1511
- ...
1512
-
1513
- @typing.overload
1514
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1515
- ...
1516
-
1517
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1518
- """
1519
- Specifies the times when the flow should be run when running on a
1520
- production scheduler.
1521
-
1522
- Parameters
1523
- ----------
1524
- hourly : bool, default False
1525
- Run the workflow hourly.
1526
- daily : bool, default True
1527
- Run the workflow daily.
1528
- weekly : bool, default False
1529
- Run the workflow weekly.
1530
- cron : str, optional, default None
1531
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1532
- specified by this expression.
1533
- timezone : str, optional, default None
1534
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1535
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1536
- """
1537
- ...
1538
-
1539
- @typing.overload
1540
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1650
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1541
1651
  """
1542
- Specifies the event(s) that this flow depends on.
1543
-
1544
- ```
1545
- @trigger(event='foo')
1546
- ```
1547
- or
1548
- ```
1549
- @trigger(events=['foo', 'bar'])
1550
- ```
1551
-
1552
- Additionally, you can specify the parameter mappings
1553
- to map event payload to Metaflow parameters for the flow.
1554
- ```
1555
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1556
- ```
1557
- or
1558
- ```
1559
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1560
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1561
- ```
1652
+ Specifies the Conda environment for all steps of the flow.
1562
1653
 
1563
- 'parameters' can also be a list of strings and tuples like so:
1564
- ```
1565
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1566
- ```
1567
- This is equivalent to:
1568
- ```
1569
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1570
- ```
1654
+ Use `@conda_base` to set common libraries required by all
1655
+ steps and use `@conda` to specify step-specific additions.
1571
1656
 
1572
1657
  Parameters
1573
1658
  ----------
1574
- event : Union[str, Dict[str, Any]], optional, default None
1575
- Event dependency for this flow.
1576
- events : List[Union[str, Dict[str, Any]]], default []
1577
- Events dependency for this flow.
1578
- options : Dict[str, Any], default {}
1579
- Backend-specific configuration for tuning eventing behavior.
1580
-
1581
-
1659
+ packages : Dict[str, str], default {}
1660
+ Packages to use for this flow. The key is the name of the package
1661
+ and the value is the version to use.
1662
+ libraries : Dict[str, str], default {}
1663
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1664
+ python : str, optional, default None
1665
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1666
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1667
+ disabled : bool, default False
1668
+ If set to True, disables Conda.
1582
1669
  """
1583
1670
  ...
1584
1671
 
1585
1672
  @typing.overload
1586
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1673
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1587
1674
  ...
1588
1675
 
1589
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1676
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1590
1677
  """
1591
- Specifies the event(s) that this flow depends on.
1592
-
1593
- ```
1594
- @trigger(event='foo')
1595
- ```
1596
- or
1597
- ```
1598
- @trigger(events=['foo', 'bar'])
1599
- ```
1600
-
1601
- Additionally, you can specify the parameter mappings
1602
- to map event payload to Metaflow parameters for the flow.
1603
- ```
1604
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1605
- ```
1606
- or
1607
- ```
1608
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1609
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1610
- ```
1678
+ Specifies the Conda environment for all steps of the flow.
1611
1679
 
1612
- 'parameters' can also be a list of strings and tuples like so:
1613
- ```
1614
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1615
- ```
1616
- This is equivalent to:
1617
- ```
1618
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1619
- ```
1680
+ Use `@conda_base` to set common libraries required by all
1681
+ steps and use `@conda` to specify step-specific additions.
1620
1682
 
1621
1683
  Parameters
1622
1684
  ----------
1623
- event : Union[str, Dict[str, Any]], optional, default None
1624
- Event dependency for this flow.
1625
- events : List[Union[str, Dict[str, Any]]], default []
1626
- Events dependency for this flow.
1627
- options : Dict[str, Any], default {}
1628
- Backend-specific configuration for tuning eventing behavior.
1629
-
1630
-
1685
+ packages : Dict[str, str], default {}
1686
+ Packages to use for this flow. The key is the name of the package
1687
+ and the value is the version to use.
1688
+ libraries : Dict[str, str], default {}
1689
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1690
+ python : str, optional, default None
1691
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1692
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1693
+ disabled : bool, default False
1694
+ If set to True, disables Conda.
1631
1695
  """
1632
1696
  ...
1633
1697
 
@@ -1674,154 +1738,90 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1674
1738
  ...
1675
1739
 
1676
1740
  @typing.overload
1677
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1741
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1678
1742
  """
1679
- Specifies the flow(s) that this flow depends on.
1680
-
1681
- ```
1682
- @trigger_on_finish(flow='FooFlow')
1683
- ```
1684
- or
1685
- ```
1686
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1687
- ```
1688
- This decorator respects the @project decorator and triggers the flow
1689
- when upstream runs within the same namespace complete successfully
1690
-
1691
- Additionally, you can specify project aware upstream flow dependencies
1692
- by specifying the fully qualified project_flow_name.
1693
- ```
1694
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1695
- ```
1696
- or
1697
- ```
1698
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1699
- ```
1700
-
1701
- You can also specify just the project or project branch (other values will be
1702
- inferred from the current project or project branch):
1703
- ```
1704
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1705
- ```
1706
-
1707
- Note that `branch` is typically one of:
1708
- - `prod`
1709
- - `user.bob`
1710
- - `test.my_experiment`
1711
- - `prod.staging`
1743
+ Specifies the times when the flow should be run when running on a
1744
+ production scheduler.
1712
1745
 
1713
1746
  Parameters
1714
1747
  ----------
1715
- flow : Union[str, Dict[str, str]], optional, default None
1716
- Upstream flow dependency for this flow.
1717
- flows : List[Union[str, Dict[str, str]]], default []
1718
- Upstream flow dependencies for this flow.
1719
- options : Dict[str, Any], default {}
1720
- Backend-specific configuration for tuning eventing behavior.
1721
-
1722
-
1748
+ hourly : bool, default False
1749
+ Run the workflow hourly.
1750
+ daily : bool, default True
1751
+ Run the workflow daily.
1752
+ weekly : bool, default False
1753
+ Run the workflow weekly.
1754
+ cron : str, optional, default None
1755
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1756
+ specified by this expression.
1757
+ timezone : str, optional, default None
1758
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1759
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1723
1760
  """
1724
1761
  ...
1725
1762
 
1726
1763
  @typing.overload
1727
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1764
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1728
1765
  ...
1729
1766
 
1730
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1767
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1731
1768
  """
1732
- Specifies the flow(s) that this flow depends on.
1733
-
1734
- ```
1735
- @trigger_on_finish(flow='FooFlow')
1736
- ```
1737
- or
1738
- ```
1739
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1740
- ```
1741
- This decorator respects the @project decorator and triggers the flow
1742
- when upstream runs within the same namespace complete successfully
1743
-
1744
- Additionally, you can specify project aware upstream flow dependencies
1745
- by specifying the fully qualified project_flow_name.
1746
- ```
1747
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1748
- ```
1749
- or
1750
- ```
1751
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1752
- ```
1753
-
1754
- You can also specify just the project or project branch (other values will be
1755
- inferred from the current project or project branch):
1756
- ```
1757
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1758
- ```
1759
-
1760
- Note that `branch` is typically one of:
1761
- - `prod`
1762
- - `user.bob`
1763
- - `test.my_experiment`
1764
- - `prod.staging`
1769
+ Specifies the times when the flow should be run when running on a
1770
+ production scheduler.
1765
1771
 
1766
1772
  Parameters
1767
1773
  ----------
1768
- flow : Union[str, Dict[str, str]], optional, default None
1769
- Upstream flow dependency for this flow.
1770
- flows : List[Union[str, Dict[str, str]]], default []
1771
- Upstream flow dependencies for this flow.
1772
- options : Dict[str, Any], default {}
1773
- Backend-specific configuration for tuning eventing behavior.
1774
-
1775
-
1774
+ hourly : bool, default False
1775
+ Run the workflow hourly.
1776
+ daily : bool, default True
1777
+ Run the workflow daily.
1778
+ weekly : bool, default False
1779
+ Run the workflow weekly.
1780
+ cron : str, optional, default None
1781
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1782
+ specified by this expression.
1783
+ timezone : str, optional, default None
1784
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1785
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1776
1786
  """
1777
1787
  ...
1778
1788
 
1779
1789
  @typing.overload
1780
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1790
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1781
1791
  """
1782
- Specifies the Conda environment for all steps of the flow.
1783
-
1784
- Use `@conda_base` to set common libraries required by all
1785
- steps and use `@conda` to specify step-specific additions.
1792
+ Specifies the PyPI packages for all steps of the flow.
1786
1793
 
1794
+ Use `@pypi_base` to set common packages required by all
1795
+ steps and use `@pypi` to specify step-specific overrides.
1787
1796
  Parameters
1788
1797
  ----------
1789
- packages : Dict[str, str], default {}
1798
+ packages : Dict[str, str], default: {}
1790
1799
  Packages to use for this flow. The key is the name of the package
1791
1800
  and the value is the version to use.
1792
- libraries : Dict[str, str], default {}
1793
- Supported for backward compatibility. When used with packages, packages will take precedence.
1794
- python : str, optional, default None
1801
+ python : str, optional, default: None
1795
1802
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1796
1803
  that the version used will correspond to the version of the Python interpreter used to start the run.
1797
- disabled : bool, default False
1798
- If set to True, disables Conda.
1799
1804
  """
1800
1805
  ...
1801
1806
 
1802
1807
  @typing.overload
1803
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1808
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1804
1809
  ...
1805
1810
 
1806
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1811
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1807
1812
  """
1808
- Specifies the Conda environment for all steps of the flow.
1809
-
1810
- Use `@conda_base` to set common libraries required by all
1811
- steps and use `@conda` to specify step-specific additions.
1813
+ Specifies the PyPI packages for all steps of the flow.
1812
1814
 
1815
+ Use `@pypi_base` to set common packages required by all
1816
+ steps and use `@pypi` to specify step-specific overrides.
1813
1817
  Parameters
1814
1818
  ----------
1815
- packages : Dict[str, str], default {}
1819
+ packages : Dict[str, str], default: {}
1816
1820
  Packages to use for this flow. The key is the name of the package
1817
1821
  and the value is the version to use.
1818
- libraries : Dict[str, str], default {}
1819
- Supported for backward compatibility. When used with packages, packages will take precedence.
1820
- python : str, optional, default None
1822
+ python : str, optional, default: None
1821
1823
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1822
1824
  that the version used will correspond to the version of the Python interpreter used to start the run.
1823
- disabled : bool, default False
1824
- If set to True, disables Conda.
1825
1825
  """
1826
1826
  ...
1827
1827