metaflow-stubs 2.12.12__py2.py3-none-any.whl → 2.12.14__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +564 -561
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +6 -4
  16. metaflow-stubs/metaflow_current.pyi +23 -23
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +7 -7
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +20 -5
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +5 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +20 -5
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  67. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +6 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  112. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +6 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  130. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +22 -7
  135. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  136. metaflow-stubs/runner/nbdeploy.pyi +4 -4
  137. metaflow-stubs/runner/nbrun.pyi +4 -4
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +2 -2
  141. metaflow-stubs/system/system_logger.pyi +2 -2
  142. metaflow-stubs/system/system_monitor.pyi +2 -2
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.12.dist-info → metaflow_stubs-2.12.14.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.14.dist-info/RECORD +150 -0
  148. {metaflow_stubs-2.12.12.dist-info → metaflow_stubs-2.12.14.dist-info}/WHEEL +1 -1
  149. metaflow_stubs-2.12.12.dist-info/RECORD +0 -150
  150. {metaflow_stubs-2.12.12.dist-info → metaflow_stubs-2.12.14.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.12 #
- # Generated on 2024-08-13T23:49:26.963512 #
+ # MF version: 2.12.14 #
+ # Generated on 2024-08-22T15:17:51.800458 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.runner.metaflow_runner
-     import metaflow.plugins.datatools.s3.s3
-     import io
-     import metaflow.client.core
+     import metaflow.metaflow_current
      import metaflow.events
+     import metaflow.plugins.datatools.s3.s3
      import metaflow.parameters
-     import typing
-     import metaflow.metaflow_current
-     import datetime
-     import metaflow.datastore.inputs
+     import io
      import metaflow._vendor.click.types
+     import datetime
      import metaflow.flowspec
+     import metaflow.runner.metaflow_runner
+     import metaflow.datastore.inputs
+     import metaflow.client.core
+     import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

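Aside from the version banner, the only change in this first hunk is the order of imports under the `typing.TYPE_CHECKING` guard. That guard means the imports are evaluated only by static type checkers and never at runtime, so the reshuffle is purely cosmetic. A minimal sketch of the same pattern (the function and its body are illustrative, not from the stub):

```python
from __future__ import annotations  # annotations stay strings at runtime

import typing

if typing.TYPE_CHECKING:
    # Seen by mypy/pyright only; skipped when the module is executed,
    # so a heavy or circular import here costs nothing at runtime.
    import datetime

def is_stale(stamp: datetime.datetime) -> bool:
    # The annotation above is resolved lazily by the type checker;
    # the real import is deferred to call time.
    import datetime as dt
    return dt.datetime.now() > stamp
```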
@@ -728,149 +728,79 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
      ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+     Specifies the resources needed when executing this step.
+
+     Use `@resources` to specify the resource requirements
+     independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+     You can choose the compute layer on the command line by executing e.g.
+     ```
+     python myflow.py run --with batch
+     ```
+     or
+     ```
+     python myflow.py run --with kubernetes
+     ```
+     which executes the flow on the desired system using the
+     requirements specified in `@resources`.

      Parameters
      ----------
      cpu : int, default 1
-         Number of CPUs required for this step. If `@resources` is
-         also present, the maximum value from all decorators is used.
+         Number of CPUs required for this step.
      gpu : int, default 0
-         Number of GPUs required for this step. If `@resources` is
-         also present, the maximum value from all decorators is used.
+         Number of GPUs required for this step.
+     disk : int, optional, default None
+         Disk size (in MB) required for this step. Only applies on Kubernetes.
      memory : int, default 4096
-         Memory size (in MB) required for this step. If
-         `@resources` is also present, the maximum value from all decorators is
-         used.
-     image : str, optional, default None
-         Docker image to use when launching on AWS Batch. If not specified, and
-         METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-         not, a default Docker image mapping to the current version of Python is used.
-     queue : str, default METAFLOW_BATCH_JOB_QUEUE
-         AWS Batch Job Queue to submit the job to.
-     iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-         AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-     execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-         AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-         (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+         Memory size (in MB) required for this step.
      shared_memory : int, optional, default None
          The value for the size (in MiB) of the /dev/shm volume for this step.
          This parameter maps to the `--shm-size` option in Docker.
-     max_swap : int, optional, default None
-         The total amount of swap memory (in MiB) a container can use for this
-         step. This parameter is translated to the `--memory-swap` option in
-         Docker where the value is the sum of the container memory plus the
-         `max_swap` value.
-     swappiness : int, optional, default None
-         This allows you to tune memory swappiness behavior for this step.
-         A swappiness value of 0 causes swapping not to happen unless absolutely
-         necessary. A swappiness value of 100 causes pages to be swapped very
-         aggressively. Accepted values are whole numbers between 0 and 100.
-     use_tmpfs : bool, default False
-         This enables an explicit tmpfs mount for this step. Note that tmpfs is
-         not available on Fargate compute environments
-     tmpfs_tempdir : bool, default True
-         sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-     tmpfs_size : int, optional, default None
-         The value for the size (in MiB) of the tmpfs mount for this step.
-         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-         memory allocated for this step.
-     tmpfs_path : str, optional, default None
-         Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-     inferentia : int, default 0
-         Number of Inferentia chips required for this step.
-     trainium : int, default None
-         Alias for inferentia. Use only one of the two.
-     efa : int, default 0
-         Number of elastic fabric adapter network devices to attach to container
-     ephemeral_storage : int, default None
-         The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-         This is only relevant for Fargate compute environments
-     log_driver: str, optional, default None
-         The log driver to use for the Amazon ECS container.
-     log_options: List[str], optional, default None
-         List of strings containing options for the chosen log driver. The configurable values
-         depend on the `log driver` chosen. Validation of these options is not supported yet.
-         Example: [`awslogs-group:aws/batch/job`]
      """
      ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
      ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
      """
-     Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+     Specifies the resources needed when executing this step.
+
+     Use `@resources` to specify the resource requirements
+     independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+     You can choose the compute layer on the command line by executing e.g.
+     ```
+     python myflow.py run --with batch
+     ```
+     or
+     ```
+     python myflow.py run --with kubernetes
+     ```
+     which executes the flow on the desired system using the
+     requirements specified in `@resources`.

      Parameters
      ----------
      cpu : int, default 1
-         Number of CPUs required for this step. If `@resources` is
-         also present, the maximum value from all decorators is used.
+         Number of CPUs required for this step.
      gpu : int, default 0
-         Number of GPUs required for this step. If `@resources` is
-         also present, the maximum value from all decorators is used.
+         Number of GPUs required for this step.
+     disk : int, optional, default None
+         Disk size (in MB) required for this step. Only applies on Kubernetes.
      memory : int, default 4096
-         Memory size (in MB) required for this step. If
-         `@resources` is also present, the maximum value from all decorators is
-         used.
-     image : str, optional, default None
-         Docker image to use when launching on AWS Batch. If not specified, and
-         METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-         not, a default Docker image mapping to the current version of Python is used.
-     queue : str, default METAFLOW_BATCH_JOB_QUEUE
-         AWS Batch Job Queue to submit the job to.
-     iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-         AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-     execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-         AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-         (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+         Memory size (in MB) required for this step.
      shared_memory : int, optional, default None
          The value for the size (in MiB) of the /dev/shm volume for this step.
          This parameter maps to the `--shm-size` option in Docker.
-     max_swap : int, optional, default None
-         The total amount of swap memory (in MiB) a container can use for this
-         step. This parameter is translated to the `--memory-swap` option in
-         Docker where the value is the sum of the container memory plus the
-         `max_swap` value.
-     swappiness : int, optional, default None
-         This allows you to tune memory swappiness behavior for this step.
-         A swappiness value of 0 causes swapping not to happen unless absolutely
-         necessary. A swappiness value of 100 causes pages to be swapped very
-         aggressively. Accepted values are whole numbers between 0 and 100.
-     use_tmpfs : bool, default False
-         This enables an explicit tmpfs mount for this step. Note that tmpfs is
-         not available on Fargate compute environments
-     tmpfs_tempdir : bool, default True
-         sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-     tmpfs_size : int, optional, default None
-         The value for the size (in MiB) of the tmpfs mount for this step.
-         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-         memory allocated for this step.
-     tmpfs_path : str, optional, default None
-         Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-     inferentia : int, default 0
-         Number of Inferentia chips required for this step.
-     trainium : int, default None
-         Alias for inferentia. Use only one of the two.
-     efa : int, default 0
-         Number of elastic fabric adapter network devices to attach to container
-     ephemeral_storage : int, default None
-         The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-         This is only relevant for Fargate compute environments
-     log_driver: str, optional, default None
-         The log driver to use for the Amazon ECS container.
-     log_options: List[str], optional, default None
-         List of strings containing options for the chosen log driver. The configurable values
-         depend on the `log driver` chosen. Validation of these options is not supported yet.
-         Example: [`awslogs-group:aws/batch/job`]
      """
      ...

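This hunk only moves `@resources` to where `@batch` used to sit in the stub; the docstrings themselves are unchanged. As the `@resources` docstring above describes, the decorator declares requirements without committing to a compute layer. A minimal sketch of its use (flow and artifact names here are made up):

```python
from metaflow import FlowSpec, step, resources

class TrainFlow(FlowSpec):
    """Illustrative flow; names are not from the stubs."""

    @resources(cpu=2, memory=8192)  # declare requirements, not a compute layer
    @step
    def start(self):
        self.model = "trained"      # placeholder work
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```

Run it locally with `python trainflow.py run`, or route the same code through a cluster with `python trainflow.py run --with batch` or `--with kubernetes`; the chosen layer then honors the `@resources` requirements, with `disk` applying only on Kubernetes per the docstring.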
@@ -932,117 +862,90 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
      ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Specifies secrets to be retrieved and injected as environment variables prior to
-     the execution of a step.
+     Creates a human-readable report, a Metaflow Card, after this step completes.
+
+     Note that you may add multiple `@card` decorators in a step with different parameters.

      Parameters
      ----------
-     sources : List[Union[str, Dict[str, Any]]], default: []
-         List of secret specs, defining how the secrets are to be retrieved
+     type : str, default 'default'
+         Card type.
+     id : str, optional, default None
+         If multiple cards are present, use this id to identify this card.
+     options : Dict[str, Any], default {}
+         Options passed to the card. The contents depend on the card type.
+     timeout : int, default 45
+         Interrupt reporting if it takes more than this many seconds.
+
+
      """
      ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
      ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
      """
-     Specifies secrets to be retrieved and injected as environment variables prior to
-     the execution of a step.
+     Creates a human-readable report, a Metaflow Card, after this step completes.
+
+     Note that you may add multiple `@card` decorators in a step with different parameters.

      Parameters
      ----------
-     sources : List[Union[str, Dict[str, Any]]], default: []
-         List of secret specs, defining how the secrets are to be retrieved
+     type : str, default 'default'
+         Card type.
+     id : str, optional, default None
+         If multiple cards are present, use this id to identify this card.
+     options : Dict[str, Any], default {}
+         Options passed to the card. The contents depend on the card type.
+     timeout : int, default 45
+         Interrupt reporting if it takes more than this many seconds.
+
+
      """
      ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Specifies the resources needed when executing this step.
+     Specifies a timeout for your step.

-     Use `@resources` to specify the resource requirements
-     independently of the specific compute layer (`@batch`, `@kubernetes`).
+     This decorator is useful if this step may hang indefinitely.

-     You can choose the compute layer on the command line by executing e.g.
-     ```
-     python myflow.py run --with batch
-     ```
-     or
-     ```
-     python myflow.py run --with kubernetes
-     ```
-     which executes the flow on the desired system using the
-     requirements specified in `@resources`.
+     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+     A timeout is considered to be an exception thrown by the step. It will cause the step to be
+     retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+     Note that all the values specified in parameters are added together so if you specify
+     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

      Parameters
      ----------
-     cpu : int, default 1
-         Number of CPUs required for this step.
-     gpu : int, default 0
-         Number of GPUs required for this step.
-     disk : int, optional, default None
-         Disk size (in MB) required for this step. Only applies on Kubernetes.
-     memory : int, default 4096
-         Memory size (in MB) required for this step.
-     shared_memory : int, optional, default None
-         The value for the size (in MiB) of the /dev/shm volume for this step.
-         This parameter maps to the `--shm-size` option in Docker.
+     seconds : int, default 0
+         Number of seconds to wait prior to timing out.
+     minutes : int, default 0
+         Number of minutes to wait prior to timing out.
+     hours : int, default 0
+         Number of hours to wait prior to timing out.
      """
      ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-     ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
-     """
-     Specifies the resources needed when executing this step.
-
-     Use `@resources` to specify the resource requirements
-     independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-     You can choose the compute layer on the command line by executing e.g.
-     ```
-     python myflow.py run --with batch
-     ```
-     or
-     ```
-     python myflow.py run --with kubernetes
-     ```
-     which executes the flow on the desired system using the
-     requirements specified in `@resources`.
-
-     Parameters
-     ----------
-     cpu : int, default 1
-         Number of CPUs required for this step.
-     gpu : int, default 0
-         Number of GPUs required for this step.
-     disk : int, optional, default None
-         Disk size (in MB) required for this step. Only applies on Kubernetes.
-     memory : int, default 4096
-         Memory size (in MB) required for this step.
-     shared_memory : int, optional, default None
-         The value for the size (in MiB) of the /dev/shm volume for this step.
-         This parameter maps to the `--shm-size` option in Docker.
-     """
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
      ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
      """
      Specifies a timeout for your step.

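The `@card` and `@timeout` docstrings shuffled in this hunk document two independent step decorators that are commonly stacked. A hedged sketch combining them, following the parameter docs above (step body and flow name are illustrative):

```python
from metaflow import FlowSpec, step, card, timeout

class ReportFlow(FlowSpec):

    @card(type="default", timeout=45)   # render a card; give reporting up to 45s
    @timeout(minutes=10, seconds=30)    # durations are summed: 10m30s total
    @step
    def start(self):
        self.rows = list(range(1000))   # artifacts appear on the default card
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```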
@@ -1067,65 +970,182 @@ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Cal
      ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+     Parameters
+     ----------
+     cpu : int, default 1
+         Number of CPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     gpu : int, default 0
+         Number of GPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     memory : int, default 4096
+         Memory size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     image : str, optional, default None
+         Docker image to use when launching on AWS Batch. If not specified, and
+         METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+         not, a default Docker image mapping to the current version of Python is used.
+     queue : str, default METAFLOW_BATCH_JOB_QUEUE
+         AWS Batch Job Queue to submit the job to.
+     iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+         AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+     execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+         AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+         (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+     shared_memory : int, optional, default None
+         The value for the size (in MiB) of the /dev/shm volume for this step.
+         This parameter maps to the `--shm-size` option in Docker.
+     max_swap : int, optional, default None
+         The total amount of swap memory (in MiB) a container can use for this
+         step. This parameter is translated to the `--memory-swap` option in
+         Docker where the value is the sum of the container memory plus the
+         `max_swap` value.
+     swappiness : int, optional, default None
+         This allows you to tune memory swappiness behavior for this step.
+         A swappiness value of 0 causes swapping not to happen unless absolutely
+         necessary. A swappiness value of 100 causes pages to be swapped very
+         aggressively. Accepted values are whole numbers between 0 and 100.
+     use_tmpfs : bool, default False
+         This enables an explicit tmpfs mount for this step. Note that tmpfs is
+         not available on Fargate compute environments
+     tmpfs_tempdir : bool, default True
+         sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+     tmpfs_size : int, optional, default None
+         The value for the size (in MiB) of the tmpfs mount for this step.
+         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+         memory allocated for this step.
+     tmpfs_path : str, optional, default None
+         Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+     inferentia : int, default 0
+         Number of Inferentia chips required for this step.
+     trainium : int, default None
+         Alias for inferentia. Use only one of the two.
+     efa : int, default 0
+         Number of elastic fabric adapter network devices to attach to container
+     ephemeral_storage : int, default None
+         The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+         This is only relevant for Fargate compute environments
+     log_driver: str, optional, default None
+         The log driver to use for the Amazon ECS container.
+     log_options: List[str], optional, default None
+         List of strings containing options for the chosen log driver. The configurable values
+         depend on the `log driver` chosen. Validation of these options is not supported yet.
+         Example: [`awslogs-group:aws/batch/job`]
+     """
      ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
      """
-     Specifies a timeout for your step.
-
-     This decorator is useful if this step may hang indefinitely.
-
-     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-     A timeout is considered to be an exception thrown by the step. It will cause the step to be
-     retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-     Note that all the values specified in parameters are added together so if you specify
-     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+     Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

      Parameters
      ----------
-     seconds : int, default 0
-         Number of seconds to wait prior to timing out.
-     minutes : int, default 0
-         Number of minutes to wait prior to timing out.
-     hours : int, default 0
-         Number of hours to wait prior to timing out.
+     cpu : int, default 1
+         Number of CPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     gpu : int, default 0
+         Number of GPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     memory : int, default 4096
+         Memory size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     image : str, optional, default None
+         Docker image to use when launching on AWS Batch. If not specified, and
+         METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+         not, a default Docker image mapping to the current version of Python is used.
+     queue : str, default METAFLOW_BATCH_JOB_QUEUE
+         AWS Batch Job Queue to submit the job to.
+     iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+         AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+     execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+         AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+         (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+     shared_memory : int, optional, default None
+         The value for the size (in MiB) of the /dev/shm volume for this step.
+         This parameter maps to the `--shm-size` option in Docker.
+     max_swap : int, optional, default None
+         The total amount of swap memory (in MiB) a container can use for this
+         step. This parameter is translated to the `--memory-swap` option in
+         Docker where the value is the sum of the container memory plus the
+         `max_swap` value.
+     swappiness : int, optional, default None
+         This allows you to tune memory swappiness behavior for this step.
+         A swappiness value of 0 causes swapping not to happen unless absolutely
+         necessary. A swappiness value of 100 causes pages to be swapped very
+         aggressively. Accepted values are whole numbers between 0 and 100.
+     use_tmpfs : bool, default False
+         This enables an explicit tmpfs mount for this step. Note that tmpfs is
+         not available on Fargate compute environments
+     tmpfs_tempdir : bool, default True
+         sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+     tmpfs_size : int, optional, default None
+         The value for the size (in MiB) of the tmpfs mount for this step.
+         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+         memory allocated for this step.
+     tmpfs_path : str, optional, default None
+         Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+     inferentia : int, default 0
+         Number of Inferentia chips required for this step.
+     trainium : int, default None
+         Alias for inferentia. Use only one of the two.
+     efa : int, default 0
+         Number of elastic fabric adapter network devices to attach to container
+     ephemeral_storage : int, default None
+         The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+         This is only relevant for Fargate compute environments
+     log_driver: str, optional, default None
+         The log driver to use for the Amazon ECS container.
+     log_options: List[str], optional, default None
+         List of strings containing options for the chosen log driver. The configurable values
+         depend on the `log driver` chosen. Validation of these options is not supported yet.
+         Example: [`awslogs-group:aws/batch/job`]
      """
      ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Specifies environment variables to be set prior to the execution of a step.
+     Specifies secrets to be retrieved and injected as environment variables prior to
+     the execution of a step.

      Parameters
      ----------
-     vars : Dict[str, str], default {}
-         Dictionary of environment variables to set.
+     sources : List[Union[str, Dict[str, Any]]], default: []
+         List of secret specs, defining how the secrets are to be retrieved
      """
      ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
      ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
      """
-     Specifies environment variables to be set prior to the execution of a step.
+     Specifies secrets to be retrieved and injected as environment variables prior to
+     the execution of a step.

      Parameters
      ----------
-     vars : Dict[str, str], default {}
-         Dictionary of environment variables to set.
+     sources : List[Union[str, Dict[str, Any]]], default: []
+         List of secret specs, defining how the secrets are to be retrieved
      """
      ...

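Per the relocated `@secrets` docstring, the listed secret sources are resolved and injected as environment variables before the step body runs. A minimal sketch under stated assumptions (the secret name `my-db-credentials` and the `DB_PASSWORD` key inside it are hypothetical):

```python
import os
from metaflow import FlowSpec, step, secrets

class DbFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        # By the time this body executes, the keys stored in the secret
        # have already been injected into the process environment.
        password = os.environ["DB_PASSWORD"]  # assumed key inside the secret
        self.connected = bool(password)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DbFlow()
```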
@@ -1183,121 +1203,33 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
      ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Decorator prototype for all step decorators. This function gets specialized
-     and imported for all decorators types by _import_plugin_decorators().
+     Specifies environment variables to be set prior to the execution of a step.
+
+     Parameters
+     ----------
+     vars : Dict[str, str], default {}
+         Dictionary of environment variables to set.
      """
      ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-     """
-     Decorator prototype for all step decorators. This function gets specialized
-     and imported for all decorators types by _import_plugin_decorators().
-     """
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
      ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
      """
-     Specifies that the step will success under all circumstances.
-
-     The decorator will create an optional artifact, specified by `var`, which
-     contains the exception raised. You can use it to detect the presence
-     of errors, indicating that all happy-path artifacts produced by the step
-     are missing.
+     Specifies environment variables to be set prior to the execution of a step.

      Parameters
      ----------
-     var : str, optional, default None
-         Name of the artifact in which to store the caught exception.
-         If not specified, the exception is not stored.
-     print_exception : bool, default True
-         Determines whether or not the exception is printed to
-         stdout when caught.
-     """
-     ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-     ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-     ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
-     """
-     Specifies that the step will success under all circumstances.
-
-     The decorator will create an optional artifact, specified by `var`, which
-     contains the exception raised. You can use it to detect the presence
-     of errors, indicating that all happy-path artifacts produced by the step
-     are missing.
-
-     Parameters
-     ----------
-     var : str, optional, default None
-         Name of the artifact in which to store the caught exception.
-         If not specified, the exception is not stored.
-     print_exception : bool, default True
-         Determines whether or not the exception is printed to
-         stdout when caught.
-     """
-     ...
-
- @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-     """
-     Creates a human-readable report, a Metaflow Card, after this step completes.
-
-     Note that you may add multiple `@card` decorators in a step with different parameters.
-
-     Parameters
-     ----------
-     type : str, default 'default'
-         Card type.
-     id : str, optional, default None
-         If multiple cards are present, use this id to identify this card.
-     options : Dict[str, Any], default {}
-         Options passed to the card. The contents depend on the card type.
-     timeout : int, default 45
-         Interrupt reporting if it takes more than this many seconds.
-
-
-     """
-     ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-     ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-     ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
-     """
-     Creates a human-readable report, a Metaflow Card, after this step completes.
-
-     Note that you may add multiple `@card` decorators in a step with different parameters.
-
-     Parameters
-     ----------
-     type : str, default 'default'
-         Card type.
-     id : str, optional, default None
-         If multiple cards are present, use this id to identify this card.
-     options : Dict[str, Any], default {}
-         Options passed to the card. The contents depend on the card type.
-     timeout : int, default 45
-         Interrupt reporting if it takes more than this many seconds.
-
-
+     vars : Dict[str, str], default {}
+         Dictionary of environment variables to set.
      """
      ...

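`@environment`, whose docstring moves into place above, is the plain sibling of `@secrets`: it sets ordinary, non-secret environment variables before the step runs. A short sketch using the `vars` parameter documented above (the variable itself is illustrative):

```python
import os
from metaflow import FlowSpec, step, environment

class EnvFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})  # illustrative variable
    @step
    def start(self):
        # The variable is present in the step's process environment.
        assert os.environ["TOKENIZERS_PARALLELISM"] == "false"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```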
@@ -1350,7 +1282,56 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1350
1282
  """
1351
1283
  ...
1352
1284
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
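A minimal sketch of how these `@catch` overloads are used in practice; the flow, step, and artifact names are hypothetical:

```python
from metaflow import FlowSpec, catch, step

class CatchDemoFlow(FlowSpec):  # hypothetical flow name

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # Any exception raised here is caught and stored in the
        # 'compute_failed' artifact instead of failing the run.
        raise ValueError("simulated failure")
        self.next(self.end)  # Metaflow reads the graph statically

    @step
    def end(self):
        # Downstream, the artifact signals that the happy-path
        # artifacts from 'start' are missing.
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)

if __name__ == "__main__":
    CatchDemoFlow()
```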
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
 
@@ -1406,52 +1387,112 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  Shared memory size (in MiB) required for this step
  port: int, optional
  Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
  """
  ...
 
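A minimal sketch of the new `compute_pool` argument in use; resource sizes and the pool name are illustrative assumptions:

```python
from metaflow import FlowSpec, kubernetes, step

class K8sDemoFlow(FlowSpec):  # hypothetical flow name

    # compute_pool is new in this release; "batch-pool-a" is a made-up pool.
    # If omitted, any accessible compute pool within the perimeter is used.
    @kubernetes(cpu=2, memory=8192, compute_pool="batch-pool-a")
    @step
    def start(self):
        print("running on Kubernetes")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```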
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the flow(s) that this flow depends on.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorator types by _import_plugin_decorators().
+ """
+ ...
+
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
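A sketch of this sensor attached to a flow. The DAG id, task id, and flow name are hypothetical; since the stub declares every argument keyword-only, all are spelled out with the defaults noted in the docstring, though at runtime most can likely be omitted:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step

# DAG id 'upstream_etl' and task id 'load_table' are hypothetical.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_upstream",
    description="Block start until the upstream task succeeds",
    external_dag_id="upstream_etl",
    external_task_ids=["load_table"],
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
)
class SensorDemoFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorDemoFlow()
```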
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1460,51 +1501,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1512,6 +1549,73 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
 
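A sketch of event-triggered deployment using the parameter mapping described above; the event name, payload field, parameter, and flow name are hypothetical, and the mapping only takes effect once the flow is deployed to an event-capable backend such as Argo Workflows:

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Fires when an event named 'data_ready' is published; the payload
# field 'payload_size' is mapped onto the 'batch_size' parameter.
@trigger(event={"name": "data_ready",
                "parameters": {"batch_size": "payload_size"}})
class TriggeredFlow(FlowSpec):  # hypothetical flow name

    batch_size = Parameter("batch_size", default=0)

    @step
    def start(self):
        print("triggered with batch_size =", self.batch_size)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TriggeredFlow()
```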
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
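A sketch of a cron-based schedule; the flow name and expression (02:30 daily) are illustrative assumptions, and the exact cron dialect depends on the target scheduler (EventBridge vs. Argo):

```python
from metaflow import FlowSpec, schedule, step

# Runs at 02:30 every day once deployed to a production scheduler;
# this 5-field cron form with an IANA timezone targets Argo Workflows.
@schedule(cron="30 2 * * *", timezone="America/Los_Angeles")
class NightlyFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```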
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
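A minimal sketch of project scoping; the project and flow names are hypothetical:

```python
from metaflow import FlowSpec, project, step

# All flows sharing the project name 'demo_analytics' get a common,
# project-scoped namespace on the production scheduler.
@project(name="demo_analytics")
class ProjectScopedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```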
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1603,175 +1707,49 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1780,47 +1758,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1828,21 +1810,42 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
 
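A minimal sketch of a flow-on-flow dependency; 'FooFlow' and the downstream flow name are hypothetical:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Once deployed, this flow starts whenever a FooFlow run in the same
# (project-aware) namespace completes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```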
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
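A sketch of flow-wide package pinning with `@pypi_base`; the flow name and version pins are illustrative:

```python
from metaflow import FlowSpec, pypi_base, step

# Flow-wide package pins shared by every step; versions are illustrative.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PypiBaseFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import pandas as pd  # provided by @pypi_base in every step
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()
```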
@@ -2797,7 +2800,7 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
  ...
 
  class Runner(object, metaclass=type):
- def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
  def __enter__(self) -> metaflow.runner.metaflow_runner.Runner:
  ...
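The new `file_read_timeout` argument (default 3600 seconds) threads through `Runner` and, as the hunks below show, through `NBRunner`, `Deployer`, and `NBDeployer` as well; judging by its name, it bounds how long the runner waits when reading its internal state files. A usage sketch, assuming a local `flow.py`:

```python
from metaflow import Runner

# file_read_timeout is new in 2.12.14; 600 seconds here is an
# illustrative override of the 3600-second default.
with Runner("flow.py", show_output=True, file_read_timeout=600) as runner:
    executing = runner.run()  # blocks until the run finishes
    print(executing.status)
```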
@@ -2889,7 +2892,7 @@ class Runner(object, metaclass=type):
  ...
 
  class NBRunner(object, metaclass=type):
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
  ...
  def nbrun(self, **kwargs):
  """
@@ -2995,7 +2998,7 @@ class NBRunner(object, metaclass=type):
  ...
 
  class Deployer(object, metaclass=type):
- def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
  def _Deployer__make_function(self, deployer_class):
  """
@@ -3015,7 +3018,7 @@ class Deployer(object, metaclass=type):
  ...
 
  class NBDeployer(object, metaclass=type):
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
  ...
  def cleanup(self):
  """