metaflow-stubs 2.12.10__py2.py3-none-any.whl → 2.12.12__py2.py3-none-any.whl

This diff compares the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +619 -619
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +35 -35
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +34 -11
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +11 -11
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  67. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -4
  112. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  130. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +2 -2
  135. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  136. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  137. metaflow-stubs/runner/nbrun.pyi +2 -2
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +3 -3
  141. metaflow-stubs/system/system_logger.pyi +2 -2
  142. metaflow-stubs/system/system_monitor.pyi +3 -3
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.10.dist-info → metaflow_stubs-2.12.12.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.12.dist-info/RECORD +150 -0
  148. {metaflow_stubs-2.12.10.dist-info → metaflow_stubs-2.12.12.dist-info}/WHEEL +1 -1
  149. metaflow_stubs-2.12.10.dist-info/RECORD +0 -150
  150. {metaflow_stubs-2.12.10.dist-info → metaflow_stubs-2.12.12.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.10 #
- # Generated on 2024-07-30T23:31:35.098122 #
+ # MF version: 2.12.12 #
+ # Generated on 2024-08-13T23:49:26.963512 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.metaflow_current
- import metaflow._vendor.click.types
+ import metaflow.runner.metaflow_runner
+ import metaflow.plugins.datatools.s3.s3
+ import io
  import metaflow.client.core
  import metaflow.events
  import metaflow.parameters
- import datetime
  import typing
- import metaflow.plugins.datatools.s3.s3
+ import metaflow.metaflow_current
+ import datetime
  import metaflow.datastore.inputs
- import io
+ import metaflow._vendor.click.types
  import metaflow.flowspec
- import metaflow.runner.metaflow_runner
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

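The whole import block above sits under the `typing.TYPE_CHECKING` guard, so these imports are evaluated only by static type checkers and never at runtime; the reshuffling between 2.12.10 and 2.12.12 is therefore cosmetic. A minimal sketch of the same pattern (the module use and the `tomorrow` function are illustrative, not part of the stub):

    from __future__ import annotations

    import typing

    if typing.TYPE_CHECKING:
        # Only type checkers (mypy, pyright) evaluate this block, so the
        # import costs nothing at runtime and cannot create import cycles.
        import datetime

    def tomorrow(now: "datetime.datetime") -> "datetime.datetime":
        import datetime  # real import deferred until the function runs
        return now + datetime.timedelta(days=1)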
@@ -728,187 +728,244 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies the resources needed when executing this step.

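In this hunk the stub generator swapped which decorators land at these positions: the `@card` and `@environment` text is replaced by `@batch`, `@conda`, and `@secrets`, and the `@pypi` overloads move elsewhere in the regenerated file; the public signatures themselves are unchanged between the two versions. As a usage illustration of the three decorators whose docstrings appear above, a minimal sketch (the package version, resource sizes, queue name, secret name, and `DB_USER` variable are invented for the example):

    from metaflow import FlowSpec, batch, conda, secrets, step

    class TrainFlow(FlowSpec):

        @secrets(sources=["my-training-credentials"])  # hypothetical secret name
        @conda(packages={"scikit-learn": "1.4.2"}, python="3.11.0")
        @batch(cpu=4, memory=16384, queue="my-batch-queue")  # hypothetical queue
        @step
        def start(self):
            import os
            # keys stored in the secret arrive as environment variables
            print("db user:", os.environ.get("DB_USER"))
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        TrainFlow()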
@@ -984,141 +1041,6 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1177,182 +1099,33 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

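Here the `@batch` and `@secrets` text moves out of this position (both survive elsewhere in the file) and `@environment` moves in. A minimal sketch of `@environment` per the docstring above (the variable name and value are invented for the example):

    from metaflow import FlowSpec, environment, step

    class EtlFlow(FlowSpec):

        @environment(vars={"SOURCE_REGION": "eu-west-1"})  # hypothetical variable
        @step
        def start(self):
            import os
            # the variable is set before the step body executes
            assert os.environ["SOURCE_REGION"] == "eu-west-1"
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        EtlFlow()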
@@ -1410,111 +1183,275 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...
 
  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
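Since `@catch` changes how failures surface, a short sketch of the documented `var`/`print_exception` behavior may help; the flow and artifact names are illustrative:

```python
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):
    # A raised exception is stored in the 'compute_failed' artifact
    # instead of failing the run.
    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # deliberately raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        # Downstream steps can check the artifact to detect the failure.
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)


if __name__ == "__main__":
    CatchDemoFlow()
```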
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies what flows belong to the same project.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
 
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
 
 
  """
  ...
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the event(s) that this flow depends on.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
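A minimal sketch of `@card` as documented above; the flow name and artifact are illustrative:

```python
from metaflow import FlowSpec, card, step


class CardDemoFlow(FlowSpec):
    # The default card type renders this step's artifacts as an HTML report.
    @card(type="default", timeout=45)
    @step
    def start(self):
        self.summary = {"rows": 1000, "status": "ok"}
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```

After a local `run`, the generated card can usually be inspected with `python card_demo.py card view start`.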
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
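A step-level `@pypi` sketch under the semantics documented above; the package pin and Python version are illustrative:

```python
from metaflow import FlowSpec, pypi, step


class PypiDemoFlow(FlowSpec):
    # The dependency is installed only for this step.
    @pypi(packages={"requests": "2.31.0"}, python="3.10.4")
    @step
    def start(self):
        import requests  # resolvable because of the @pypi declaration above
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```

Dependency isolation typically requires running with the pypi environment enabled, e.g. `python pypi_demo.py --environment=pypi run`.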
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ Kubernetes tolerations to use when launching pod in Kubernetes. The
+ default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step.
+ port: int, optional
+ Port number to specify in the Kubernetes job object.
+ """
+ ...
+
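A sketch of `@kubernetes` combined with `@resources`, illustrating the max-of-decorators rule documented above; all values are illustrative:

```python
from metaflow import FlowSpec, kubernetes, resources, step


class K8sDemoFlow(FlowSpec):
    # Per the docstring above, when @resources is also present the maximum
    # of the two requests is used (here: 2 CPUs, 8192 MB of memory).
    @resources(cpu=2, memory=8192)
    @kubernetes(cpu=1, memory=4096, disk=10240)
    @step
    def start(self):
        print("running inside a Kubernetes pod")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sDemoFlow()
```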
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1523,47 +1460,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1571,42 +1512,45 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
 
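A sketch of the `@trigger_on_finish` pattern documented above; the project and flow names are illustrative, and the dependency only takes effect once the flow is deployed to a production scheduler:

```python
from metaflow import FlowSpec, project, step, trigger_on_finish


# When deployed, this flow starts after a successful run of FooFlow in
# the same project namespace, per the docstring above.
@project(name="my_project")
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```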
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator ensures that the `start` step
+ starts only after all sensors finish.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
+ When it is specified as a full s3:// url, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
 
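A sketch of gating a flow's `start` step on an S3 key, per the sensor docstring above; every argument value here is illustrative, and the sensor only matters when the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_daily_input",
    description="Block start until the daily input file lands",
    bucket_key="s3://my-bucket/daily/input.csv",  # full s3:// url ...
    bucket_name=None,  # ... so bucket_name stays None, per the docstring
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorGatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```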
@@ -1659,48 +1603,6 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorator. Adding more than one decorator ensures that the `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
- When it is specified as a full s3:// url, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1744,48 +1646,132 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  ...
 
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
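A flow-wide `@pypi_base` sketch matching the docstring above; the pandas pin and Python version are illustrative:

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={"pandas": "2.1.4"}, python="3.10.4")
class PandasFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # available in every step via @pypi_base
        self.shape = pd.DataFrame({"a": [1, 2, 3]}).shape
        self.next(self.end)

    @step
    def end(self):
        print(self.shape)


if __name__ == "__main__":
    PandasFlow()
```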
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
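A `@schedule` sketch per the docstring above; the cron expression and timezone are illustrative, and accepted cron syntax varies slightly between scheduler back ends:

```python
from metaflow import FlowSpec, schedule, step


# Runs daily at 02:30 once deployed to a production scheduler.
@schedule(cron="30 2 * * *", timezone="Etc/UTC")
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```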
  @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1794,51 +1780,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1846,6 +1828,24 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
 
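An event-driven `@trigger` sketch following the parameter-mapping form documented above; 'data_updated' is a hypothetical event whose 'path' payload field maps onto the flow's parameter:

```python
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={"name": "data_updated", "parameters": {"input_path": "path"}})
class EventDrivenFlow(FlowSpec):
    input_path = Parameter("input_path", default="s3://my-bucket/default.csv")

    @step
    def start(self):
        print("triggered with", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```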
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
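A minimal `@project` sketch per the docstring above; the project name is illustrative:

```python
from metaflow import FlowSpec, project, step


# Flows sharing @project(name="analytics") get a common, branch-aware
# production namespace.
@project(name="analytics")
class ScoringFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```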
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.