metaflow-stubs 2.12.23__py2.py3-none-any.whl → 2.12.25__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry, and is provided for informational purposes only.
Files changed (152)
  1. metaflow-stubs/__init__.pyi +571 -571
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +3 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +24 -24
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +6 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +3 -3
  137. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -2
  142. metaflow-stubs/system/__init__.pyi +3 -3
  143. metaflow-stubs/system/system_logger.pyi +2 -2
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.23.dist-info → metaflow_stubs-2.12.25.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.25.dist-info/RECORD +152 -0
  150. metaflow_stubs-2.12.23.dist-info/RECORD +0 -152
  151. {metaflow_stubs-2.12.23.dist-info → metaflow_stubs-2.12.25.dist-info}/WHEEL +0 -0
  152. {metaflow_stubs-2.12.23.dist-info → metaflow_stubs-2.12.25.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.23 #
- # Generated on 2024-10-01T14:32:39.945734 #
+ # MF version: 2.12.25 #
+ # Generated on 2024-10-07T19:08:03.779487 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.plugins.datatools.s3.s3
- import metaflow.events
- import metaflow._vendor.click.types
+ import typing
+ import metaflow.metaflow_current
  import io
+ import metaflow.client.core
+ import metaflow.datastore.inputs
+ import metaflow._vendor.click.types
+ import metaflow.flowspec
+ import metaflow.plugins.datatools.s3.s3
  import datetime
  import metaflow.parameters
+ import metaflow.events
  import metaflow.runner.metaflow_runner
- import metaflow.datastore.inputs
- import metaflow.metaflow_current
- import metaflow.client.core
- import metaflow.flowspec
- import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

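The `FlowSpecDerived` type variable and `StepFlag` marker above are what the stubs use to type `@step` methods. As a minimal, hypothetical sketch of the kind of flow those signatures describe (not part of the package):

```python
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    # In the stubs, @step is typed over Callable[[FlowSpecDerived, StepFlag], None],
    # where FlowSpecDerived is bound to FlowSpec -- i.e. methods like these.

    @step
    def start(self) -> None:
        self.greeting = "hello"
        self.next(self.end)

    @step
    def end(self) -> None:
        print(self.greeting)

if __name__ == "__main__":
    HelloFlow()
```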
@@ -855,149 +855,51 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

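Since this hunk is mostly the `@catch` docstring moving into this position, a short usage sketch may help; the flow, step, and artifact names below are hypothetical, not from the package:

```python
from metaflow import FlowSpec, catch, step

class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # Any exception raised here is caught and stored in self.compute_failed;
        # the step is marked successful and the run proceeds along the graph.
        self.result = 1 / 0  # deliberately fails
        self.next(self.end)

    @step
    def end(self):
        # Read defensively: the happy-path artifacts of a caught step are missing.
        failure = getattr(self, "compute_failed", None)
        if failure is not None:
            print("start failed with:", failure)

if __name__ == "__main__":
    CatchDemoFlow()
```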
@@ -1058,83 +960,126 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ """
+ ...
+
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

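The `@kubernetes` and `@conda` docstrings that land here are easiest to read against a usage sketch; the resource sizes and package pins below are hypothetical:

```python
from metaflow import FlowSpec, conda, kubernetes, step

class K8sCondaFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=20480)  # sizes in MB, per the docstring
    @conda(packages={"pandas": "2.1.1"}, python="3.10.4")  # hypothetical pins
    @step
    def start(self):
        import pandas as pd  # resolved inside the @conda environment
        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.n_rows)

if __name__ == "__main__":
    K8sCondaFlow()
```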
@@ -1215,126 +1160,136 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the Conda environment for the step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

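For the `@secrets` and `@card` stubs added in this hunk, a combined usage sketch; the secret source and environment variable key below are hypothetical:

```python
import os

from metaflow import FlowSpec, card, secrets, step

class SecretsCardFlow(FlowSpec):

    @secrets(sources=["my-secret-source"])  # hypothetical secret spec
    @step
    def start(self):
        # @secrets injects the retrieved values as environment variables
        # before the step body runs; the key name here is made up.
        self.has_token = "API_TOKEN" in os.environ
        self.next(self.end)

    @card(type="default", timeout=45)
    @step
    def end(self):
        # Artifacts set here are rendered by the default card after the step.
        self.summary = "token present" if self.has_token else "token missing"

if __name__ == "__main__":
    SecretsCardFlow()
```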
@@ -1411,135 +1366,283 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1411
1366
  ...
1412
1367
 
1413
1368
  @typing.overload
1414
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1369
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1415
1370
  """
1416
- Specifies that the step will success under all circumstances.
1417
-
1418
- The decorator will create an optional artifact, specified by `var`, which
1419
- contains the exception raised. You can use it to detect the presence
1420
- of errors, indicating that all happy-path artifacts produced by the step
1421
- are missing.
1371
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1422
1372
 
1423
1373
  Parameters
1424
1374
  ----------
1425
- var : str, optional, default None
1426
- Name of the artifact in which to store the caught exception.
1427
- If not specified, the exception is not stored.
1428
- print_exception : bool, default True
1429
- Determines whether or not the exception is printed to
1430
- stdout when caught.
1431
- """
1432
- ...
1433
-
1434
- @typing.overload
1435
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1436
- ...
1437
-
1438
- @typing.overload
1439
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1440
- ...
1441
-
1442
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1443
- """
1444
- Specifies that the step will success under all circumstances.
1445
-
1446
- The decorator will create an optional artifact, specified by `var`, which
1447
- contains the exception raised. You can use it to detect the presence
1448
- of errors, indicating that all happy-path artifacts produced by the step
1449
- are missing.
1375
+ cpu : int, default 1
1376
+ Number of CPUs required for this step. If `@resources` is
1377
+ also present, the maximum value from all decorators is used.
1378
+ gpu : int, default 0
1379
+ Number of GPUs required for this step. If `@resources` is
1380
+ also present, the maximum value from all decorators is used.
1381
+ memory : int, default 4096
1382
+ Memory size (in MB) required for this step. If
1383
+ `@resources` is also present, the maximum value from all decorators is
1384
+ used.
1385
+ image : str, optional, default None
1386
+ Docker image to use when launching on AWS Batch. If not specified, and
1387
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1388
+ not, a default Docker image mapping to the current version of Python is used.
1389
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1390
+ AWS Batch Job Queue to submit the job to.
1391
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1392
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1393
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1394
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1395
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1396
+ shared_memory : int, optional, default None
1397
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1398
+ This parameter maps to the `--shm-size` option in Docker.
1399
+ max_swap : int, optional, default None
1400
+ The total amount of swap memory (in MiB) a container can use for this
1401
+ step. This parameter is translated to the `--memory-swap` option in
1402
+ Docker where the value is the sum of the container memory plus the
1403
+ `max_swap` value.
1404
+ swappiness : int, optional, default None
1405
+ This allows you to tune memory swappiness behavior for this step.
1406
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1407
+ necessary. A swappiness value of 100 causes pages to be swapped very
1408
+ aggressively. Accepted values are whole numbers between 0 and 100.
1409
+ use_tmpfs : bool, default False
1410
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1411
+ not available on Fargate compute environments
1412
+ tmpfs_tempdir : bool, default True
1413
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1414
+ tmpfs_size : int, optional, default None
1415
+ The value for the size (in MiB) of the tmpfs mount for this step.
1416
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1417
+ memory allocated for this step.
1418
+ tmpfs_path : str, optional, default None
1419
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1420
+ inferentia : int, default 0
1421
+ Number of Inferentia chips required for this step.
1422
+ trainium : int, default None
1423
+ Alias for inferentia. Use only one of the two.
1424
+ efa : int, default 0
1425
+ Number of elastic fabric adapter network devices to attach to container
1426
+ ephemeral_storage : int, default None
1427
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1428
+ This is only relevant for Fargate compute environments
1429
+ log_driver: str, optional, default None
1430
+ The log driver to use for the Amazon ECS container.
1431
+ log_options: List[str], optional, default None
1432
+ List of strings containing options for the chosen log driver. The configurable values
1433
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1434
+ Example: [`awslogs-group:aws/batch/job`]
1435
+ """
1436
+ ...
1437
+
1438
+ @typing.overload
1439
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1440
+ ...
1441
+
1442
+ @typing.overload
1443
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1444
+ ...
1445
+
1446
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1447
+ """
1448
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1450
1449
 
1451
1450
  Parameters
1452
1451
  ----------
1453
- var : str, optional, default None
1454
- Name of the artifact in which to store the caught exception.
1455
- If not specified, the exception is not stored.
1456
- print_exception : bool, default True
1457
- Determines whether or not the exception is printed to
1458
- stdout when caught.
1452
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that the AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of Elastic Fabric Adapter network devices to attach to the container.
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200 GiB).
+ This is only relevant for Fargate compute environments.
+ log_driver : str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options : List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the chosen `log_driver`. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

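For illustration, a minimal sketch of applying `@batch` to a step (the flow name, step names, and resource values are hypothetical; the remaining arguments keep the documented defaults):

```python
from metaflow import FlowSpec, batch, step

class BatchDemoFlow(FlowSpec):
    # Hypothetical step: request 2 CPUs, 8 GiB of memory, and a 2 GiB tmpfs
    # scratch mount for the AWS Batch task running this step.
    @batch(cpu=2, memory=8192, use_tmpfs=True, tmpfs_size=2048)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```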
  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
- 
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- 
- 
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
- 
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- 
- 
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

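For illustration, a minimal sketch of `@environment` on a step (the flow, step, and variable names are hypothetical):

```python
import os
from metaflow import FlowSpec, environment, step

class EnvDemoFlow(FlowSpec):
    # Hypothetical step: APP_MODE is injected before the step body runs.
    @environment(vars={"APP_MODE": "staging"})
    @step
    def start(self):
        print(os.environ["APP_MODE"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvDemoFlow()
```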
  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the flow(s) that this flow depends on.
+ 
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+ 
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+ 
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+ 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ 
+ 
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
- 
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the flow(s) that this flow depends on.
+ 
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+ 
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+ 
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+ 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ 
+ 
  """
  ...

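For illustration, a minimal sketch of `@trigger_on_finish` (the flow names are hypothetical; the trigger takes effect once the flow is deployed to a production orchestrator such as Argo Workflows):

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Hypothetical: once deployed, this flow starts whenever a run of FooFlow
# in the same namespace completes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```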
@@ -1565,20 +1668,111 @@ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[s
  def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+ 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ 
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
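For illustration, a minimal sketch of `@pypi_base` (the flow name and package pins are hypothetical):

```python
from metaflow import FlowSpec, pypi_base, step

# Hypothetical: pin pandas for every step of the flow; a step-level @pypi
# decorator could still override these choices per step.
@pypi_base(packages={"pandas": "2.1.0"}, python="3.10.11")
class PypiDemoFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```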
+ 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
+ 
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
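For illustration, a minimal sketch of this sensor on a flow (the DAG id and sensor name are hypothetical, and the remaining arguments are assumed to keep the defaults documented above):

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Hypothetical: when compiled with `airflow create`, the generated DAG waits
# for the external `nightly_etl` DAG to succeed before running `start`.
@airflow_external_task_sensor(external_dag_id="nightly_etl", name="wait_for_etl")
class SensorDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorDemoFlow()
```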
+ 
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ 
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+ 
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+ 
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
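For illustration, a minimal sketch of `@schedule` with a cron expression (the flow name and schedule are hypothetical; the cron syntax shown follows the AWS EventBridge format linked above):

```python
from metaflow import FlowSpec, schedule, step

# Hypothetical: when deployed to a production scheduler, run daily at 06:00.
@schedule(cron="0 6 * * ? *")
class ScheduledFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```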
1584
1778
 
@@ -1677,48 +1871,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- 
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
- 
  def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.
@@ -1786,10 +1938,13 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator ensures that the `start` step
+ starts only after all sensors finish.

  Parameters
@@ -1810,173 +1965,18 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
- 
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
- 
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
- 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
- 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
- 
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
- 
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- 
- 
- """
- ...
- 
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
- 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
- 
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
- 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
- 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
- 
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
- 
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- 
- 
- """
- ...
- 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- 
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
- 
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
- 
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- 
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
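For illustration, a minimal sketch of this sensor (the bucket key and sensor name are hypothetical, and the remaining arguments are assumed to keep the defaults documented above):

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Hypothetical: when compiled with `airflow create`, the generated DAG waits
# for the marker key to appear in S3 before running `start`.
@airflow_s3_key_sensor(bucket_key="s3://my-bucket/data/_SUCCESS", name="wait_for_data")
class S3SensorDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorDemoFlow()
```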