metaflow-stubs 2.12.24__py2.py3-none-any.whl → 2.12.25__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152) hide show
  1. metaflow-stubs/__init__.pyi +502 -502
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +3 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +22 -22
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +3 -3
  21. metaflow-stubs/plugins/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +3 -3
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +3 -3
  137. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -2
  142. metaflow-stubs/system/__init__.pyi +3 -3
  143. metaflow-stubs/system/system_logger.pyi +2 -2
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.24.dist-info → metaflow_stubs-2.12.25.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.25.dist-info/RECORD +152 -0
  150. metaflow_stubs-2.12.24.dist-info/RECORD +0 -152
  151. {metaflow_stubs-2.12.24.dist-info → metaflow_stubs-2.12.25.dist-info}/WHEEL +0 -0
  152. {metaflow_stubs-2.12.24.dist-info → metaflow_stubs-2.12.25.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,24 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.24 #
4
- # Generated on 2024-10-04T11:37:46.949506 #
3
+ # MF version: 2.12.25 #
4
+ # Generated on 2024-10-07T19:08:03.779487 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
+ import typing
11
12
  import metaflow.metaflow_current
12
- import metaflow._vendor.click.types
13
- import metaflow.datastore.inputs
13
+ import io
14
14
  import metaflow.client.core
15
- import typing
16
- import metaflow.events
17
- import metaflow.parameters
15
+ import metaflow.datastore.inputs
16
+ import metaflow._vendor.click.types
18
17
  import metaflow.flowspec
19
18
  import metaflow.plugins.datatools.s3.s3
20
- import io
21
19
  import datetime
20
+ import metaflow.parameters
21
+ import metaflow.events
22
22
  import metaflow.runner.metaflow_runner
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
@@ -855,79 +855,51 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
855
855
  ...
856
856
 
857
857
  @typing.overload
858
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
858
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
859
859
  """
860
- Specifies the resources needed when executing this step.
861
-
862
- Use `@resources` to specify the resource requirements
863
- independently of the specific compute layer (`@batch`, `@kubernetes`).
860
+ Specifies that the step will succeed under all circumstances.
864
861
 
865
- You can choose the compute layer on the command line by executing e.g.
866
- ```
867
- python myflow.py run --with batch
868
- ```
869
- or
870
- ```
871
- python myflow.py run --with kubernetes
872
- ```
873
- which executes the flow on the desired system using the
874
- requirements specified in `@resources`.
862
+ The decorator will create an optional artifact, specified by `var`, which
863
+ contains the exception raised. You can use it to detect the presence
864
+ of errors, indicating that all happy-path artifacts produced by the step
865
+ are missing.
875
866
 
876
867
  Parameters
877
868
  ----------
878
- cpu : int, default 1
879
- Number of CPUs required for this step.
880
- gpu : int, default 0
881
- Number of GPUs required for this step.
882
- disk : int, optional, default None
883
- Disk size (in MB) required for this step. Only applies on Kubernetes.
884
- memory : int, default 4096
885
- Memory size (in MB) required for this step.
886
- shared_memory : int, optional, default None
887
- The value for the size (in MiB) of the /dev/shm volume for this step.
888
- This parameter maps to the `--shm-size` option in Docker.
869
+ var : str, optional, default None
870
+ Name of the artifact in which to store the caught exception.
871
+ If not specified, the exception is not stored.
872
+ print_exception : bool, default True
873
+ Determines whether or not the exception is printed to
874
+ stdout when caught.
889
875
  """
890
876
  ...
891
877
 
892
878
  @typing.overload
893
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
879
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
894
880
  ...
895
881
 
896
882
  @typing.overload
897
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
883
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
898
884
  ...
899
885
 
900
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
886
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
901
887
  """
902
- Specifies the resources needed when executing this step.
903
-
904
- Use `@resources` to specify the resource requirements
905
- independently of the specific compute layer (`@batch`, `@kubernetes`).
888
+ Specifies that the step will succeed under all circumstances.
906
889
 
907
- You can choose the compute layer on the command line by executing e.g.
908
- ```
909
- python myflow.py run --with batch
910
- ```
911
- or
912
- ```
913
- python myflow.py run --with kubernetes
914
- ```
915
- which executes the flow on the desired system using the
916
- requirements specified in `@resources`.
890
+ The decorator will create an optional artifact, specified by `var`, which
891
+ contains the exception raised. You can use it to detect the presence
892
+ of errors, indicating that all happy-path artifacts produced by the step
893
+ are missing.
917
894
 
918
895
  Parameters
919
896
  ----------
920
- cpu : int, default 1
921
- Number of CPUs required for this step.
922
- gpu : int, default 0
923
- Number of GPUs required for this step.
924
- disk : int, optional, default None
925
- Disk size (in MB) required for this step. Only applies on Kubernetes.
926
- memory : int, default 4096
927
- Memory size (in MB) required for this step.
928
- shared_memory : int, optional, default None
929
- The value for the size (in MiB) of the /dev/shm volume for this step.
930
- This parameter maps to the `--shm-size` option in Docker.
897
+ var : str, optional, default None
898
+ Name of the artifact in which to store the caught exception.
899
+ If not specified, the exception is not stored.
900
+ print_exception : bool, default True
901
+ Determines whether or not the exception is printed to
902
+ stdout when caught.
931
903
  """
932
904
  ...
933
905
 
@@ -988,102 +960,236 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
988
960
  """
989
961
  ...
990
962
 
991
- @typing.overload
992
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
963
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
993
964
  """
994
- Specifies environment variables to be set prior to the execution of a step.
965
+ Specifies that this step should execute on Kubernetes.
995
966
 
996
967
  Parameters
997
968
  ----------
998
- vars : Dict[str, str], default {}
999
- Dictionary of environment variables to set.
969
+ cpu : int, default 1
970
+ Number of CPUs required for this step. If `@resources` is
971
+ also present, the maximum value from all decorators is used.
972
+ memory : int, default 4096
973
+ Memory size (in MB) required for this step. If
974
+ `@resources` is also present, the maximum value from all decorators is
975
+ used.
976
+ disk : int, default 10240
977
+ Disk size (in MB) required for this step. If
978
+ `@resources` is also present, the maximum value from all decorators is
979
+ used.
980
+ image : str, optional, default None
981
+ Docker image to use when launching on Kubernetes. If not specified, and
982
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
983
+ not, a default Docker image mapping to the current version of Python is used.
984
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
985
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
986
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
987
+ Kubernetes service account to use when launching pod in Kubernetes.
988
+ secrets : List[str], optional, default None
989
+ Kubernetes secrets to use when launching pod in Kubernetes. These
990
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
991
+ in Metaflow configuration.
992
+ node_selector: Union[Dict[str,str], str], optional, default None
993
+ Kubernetes node selector(s) to apply to the pod running the task.
994
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
995
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
996
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
997
+ Kubernetes namespace to use when launching pod in Kubernetes.
998
+ gpu : int, optional, default None
999
+ Number of GPUs required for this step. A value of zero implies that
1000
+ the scheduled node should not have GPUs.
1001
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1002
+ The vendor of the GPUs to be used for this step.
1003
+ tolerations : List[str], default []
1004
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1005
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1006
+ use_tmpfs : bool, default False
1007
+ This enables an explicit tmpfs mount for this step.
1008
+ tmpfs_tempdir : bool, default True
1009
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1010
+ tmpfs_size : int, optional, default: None
1011
+ The value for the size (in MiB) of the tmpfs mount for this step.
1012
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1013
+ memory allocated for this step.
1014
+ tmpfs_path : str, optional, default /metaflow_temp
1015
+ Path to tmpfs mount for this step.
1016
+ persistent_volume_claims : Dict[str, str], optional, default None
1017
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1018
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1019
+ shared_memory: int, optional
1020
+ Shared memory size (in MiB) required for this step
1021
+ port: int, optional
1022
+ Port number to specify in the Kubernetes job object
1023
+ compute_pool : str, optional, default None
1024
+ Compute pool to be used for for this step.
1025
+ If not specified, any accessible compute pool within the perimeter is used.
1000
1026
  """
1001
1027
  ...
1002
1028
 
1003
1029
  @typing.overload
1004
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1005
- ...
1006
-
1007
- @typing.overload
1008
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1009
- ...
1010
-
1011
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1030
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1012
1031
  """
1013
- Specifies environment variables to be set prior to the execution of a step.
1032
+ Specifies the Conda environment for the step.
1033
+
1034
+ Information in this decorator will augment any
1035
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1036
+ you can use `@conda_base` to set packages required by all
1037
+ steps and use `@conda` to specify step-specific overrides.
1014
1038
 
1015
1039
  Parameters
1016
1040
  ----------
1017
- vars : Dict[str, str], default {}
1018
- Dictionary of environment variables to set.
1041
+ packages : Dict[str, str], default {}
1042
+ Packages to use for this step. The key is the name of the package
1043
+ and the value is the version to use.
1044
+ libraries : Dict[str, str], default {}
1045
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1046
+ python : str, optional, default None
1047
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1048
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1049
+ disabled : bool, default False
1050
+ If set to True, disables @conda.
1019
1051
  """
1020
1052
  ...
1021
1053
 
1022
1054
  @typing.overload
1023
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1024
- """
1025
- Decorator prototype for all step decorators. This function gets specialized
1026
- and imported for all decorators types by _import_plugin_decorators().
1027
- """
1055
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1028
1056
  ...
1029
1057
 
1030
1058
  @typing.overload
1031
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1059
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1032
1060
  ...
1033
1061
 
1034
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1062
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1035
1063
  """
1036
- Decorator prototype for all step decorators. This function gets specialized
1037
- and imported for all decorators types by _import_plugin_decorators().
1064
+ Specifies the Conda environment for the step.
1065
+
1066
+ Information in this decorator will augment any
1067
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1068
+ you can use `@conda_base` to set packages required by all
1069
+ steps and use `@conda` to specify step-specific overrides.
1070
+
1071
+ Parameters
1072
+ ----------
1073
+ packages : Dict[str, str], default {}
1074
+ Packages to use for this step. The key is the name of the package
1075
+ and the value is the version to use.
1076
+ libraries : Dict[str, str], default {}
1077
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1078
+ python : str, optional, default None
1079
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1080
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1081
+ disabled : bool, default False
1082
+ If set to True, disables @conda.
1038
1083
  """
1039
1084
  ...
1040
1085
 
1041
1086
  @typing.overload
1042
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1087
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1043
1088
  """
1044
- Specifies that the step will succeed under all circumstances.
1089
+ Specifies the resources needed when executing this step.
1045
1090
 
1046
- The decorator will create an optional artifact, specified by `var`, which
1047
- contains the exception raised. You can use it to detect the presence
1048
- of errors, indicating that all happy-path artifacts produced by the step
1049
- are missing.
1091
+ Use `@resources` to specify the resource requirements
1092
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1093
+
1094
+ You can choose the compute layer on the command line by executing e.g.
1095
+ ```
1096
+ python myflow.py run --with batch
1097
+ ```
1098
+ or
1099
+ ```
1100
+ python myflow.py run --with kubernetes
1101
+ ```
1102
+ which executes the flow on the desired system using the
1103
+ requirements specified in `@resources`.
1050
1104
 
1051
1105
  Parameters
1052
1106
  ----------
1053
- var : str, optional, default None
1054
- Name of the artifact in which to store the caught exception.
1055
- If not specified, the exception is not stored.
1056
- print_exception : bool, default True
1057
- Determines whether or not the exception is printed to
1058
- stdout when caught.
1107
+ cpu : int, default 1
1108
+ Number of CPUs required for this step.
1109
+ gpu : int, default 0
1110
+ Number of GPUs required for this step.
1111
+ disk : int, optional, default None
1112
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1113
+ memory : int, default 4096
1114
+ Memory size (in MB) required for this step.
1115
+ shared_memory : int, optional, default None
1116
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1117
+ This parameter maps to the `--shm-size` option in Docker.
1059
1118
  """
1060
1119
  ...
1061
1120
 
1062
1121
  @typing.overload
1063
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1122
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1064
1123
  ...
1065
1124
 
1066
1125
  @typing.overload
1067
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1126
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1068
1127
  ...
1069
1128
 
1070
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1129
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1071
1130
  """
1072
- Specifies that the step will succeed under all circumstances.
1131
+ Specifies the resources needed when executing this step.
1073
1132
 
1074
- The decorator will create an optional artifact, specified by `var`, which
1075
- contains the exception raised. You can use it to detect the presence
1076
- of errors, indicating that all happy-path artifacts produced by the step
1077
- are missing.
1133
+ Use `@resources` to specify the resource requirements
1134
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1135
+
1136
+ You can choose the compute layer on the command line by executing e.g.
1137
+ ```
1138
+ python myflow.py run --with batch
1139
+ ```
1140
+ or
1141
+ ```
1142
+ python myflow.py run --with kubernetes
1143
+ ```
1144
+ which executes the flow on the desired system using the
1145
+ requirements specified in `@resources`.
1078
1146
 
1079
1147
  Parameters
1080
1148
  ----------
1081
- var : str, optional, default None
1082
- Name of the artifact in which to store the caught exception.
1083
- If not specified, the exception is not stored.
1084
- print_exception : bool, default True
1085
- Determines whether or not the exception is printed to
1086
- stdout when caught.
1149
+ cpu : int, default 1
1150
+ Number of CPUs required for this step.
1151
+ gpu : int, default 0
1152
+ Number of GPUs required for this step.
1153
+ disk : int, optional, default None
1154
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1155
+ memory : int, default 4096
1156
+ Memory size (in MB) required for this step.
1157
+ shared_memory : int, optional, default None
1158
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1159
+ This parameter maps to the `--shm-size` option in Docker.
1160
+ """
1161
+ ...
1162
+
1163
+ @typing.overload
1164
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1165
+ """
1166
+ Specifies secrets to be retrieved and injected as environment variables prior to
1167
+ the execution of a step.
1168
+
1169
+ Parameters
1170
+ ----------
1171
+ sources : List[Union[str, Dict[str, Any]]], default: []
1172
+ List of secret specs, defining how the secrets are to be retrieved
1173
+ """
1174
+ ...
1175
+
1176
+ @typing.overload
1177
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1178
+ ...
1179
+
1180
+ @typing.overload
1181
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1182
+ ...
1183
+
1184
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1185
+ """
1186
+ Specifies secrets to be retrieved and injected as environment variables prior to
1187
+ the execution of a step.
1188
+
1189
+ Parameters
1190
+ ----------
1191
+ sources : List[Union[str, Dict[str, Any]]], default: []
1192
+ List of secret specs, defining how the secrets are to be retrieved
1087
1193
  """
1088
1194
  ...
1089
1195
 
@@ -1187,6 +1293,78 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1187
1293
  """
1188
1294
  ...
1189
1295
 
1296
+ @typing.overload
1297
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1298
+ """
1299
+ Decorator prototype for all step decorators. This function gets specialized
1300
+ and imported for all decorators types by _import_plugin_decorators().
1301
+ """
1302
+ ...
1303
+
1304
+ @typing.overload
1305
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1306
+ ...
1307
+
1308
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1309
+ """
1310
+ Decorator prototype for all step decorators. This function gets specialized
1311
+ and imported for all decorators types by _import_plugin_decorators().
1312
+ """
1313
+ ...
1314
+
1315
+ @typing.overload
1316
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1317
+ """
1318
+ Specifies the number of times the task corresponding
1319
+ to a step needs to be retried.
1320
+
1321
+ This decorator is useful for handling transient errors, such as networking issues.
1322
+ If your task contains operations that can't be retried safely, e.g. database updates,
1323
+ it is advisable to annotate it with `@retry(times=0)`.
1324
+
1325
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1326
+ decorator will execute a no-op task after all retries have been exhausted,
1327
+ ensuring that the flow execution can continue.
1328
+
1329
+ Parameters
1330
+ ----------
1331
+ times : int, default 3
1332
+ Number of times to retry this task.
1333
+ minutes_between_retries : int, default 2
1334
+ Number of minutes between retries.
1335
+ """
1336
+ ...
1337
+
1338
+ @typing.overload
1339
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1340
+ ...
1341
+
1342
+ @typing.overload
1343
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1344
+ ...
1345
+
1346
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1347
+ """
1348
+ Specifies the number of times the task corresponding
1349
+ to a step needs to be retried.
1350
+
1351
+ This decorator is useful for handling transient errors, such as networking issues.
1352
+ If your task contains operations that can't be retried safely, e.g. database updates,
1353
+ it is advisable to annotate it with `@retry(times=0)`.
1354
+
1355
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1356
+ decorator will execute a no-op task after all retries have been exhausted,
1357
+ ensuring that the flow execution can continue.
1358
+
1359
+ Parameters
1360
+ ----------
1361
+ times : int, default 3
1362
+ Number of times to retry this task.
1363
+ minutes_between_retries : int, default 2
1364
+ Number of minutes between retries.
1365
+ """
1366
+ ...
1367
+
1190
1368
  @typing.overload
1191
1369
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1192
1370
  """
@@ -1335,260 +1513,175 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1335
1513
  ...
1336
1514
 
1337
1515
  @typing.overload
1338
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1516
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1339
1517
  """
1340
- Specifies the Conda environment for the step.
1341
-
1342
- Information in this decorator will augment any
1343
- attributes set in the `@conda_base` flow-level decorator. Hence,
1344
- you can use `@conda_base` to set packages required by all
1345
- steps and use `@conda` to specify step-specific overrides.
1518
+ Specifies environment variables to be set prior to the execution of a step.
1346
1519
 
1347
1520
  Parameters
1348
1521
  ----------
1349
- packages : Dict[str, str], default {}
1350
- Packages to use for this step. The key is the name of the package
1351
- and the value is the version to use.
1352
- libraries : Dict[str, str], default {}
1353
- Supported for backward compatibility. When used with packages, packages will take precedence.
1354
- python : str, optional, default None
1355
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1356
- that the version used will correspond to the version of the Python interpreter used to start the run.
1357
- disabled : bool, default False
1358
- If set to True, disables @conda.
1522
+ vars : Dict[str, str], default {}
1523
+ Dictionary of environment variables to set.
1359
1524
  """
1360
1525
  ...
1361
1526
 
1362
1527
  @typing.overload
1363
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1528
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1364
1529
  ...
1365
1530
 
1366
1531
  @typing.overload
1367
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1532
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1368
1533
  ...
1369
1534
 
1370
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1535
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1371
1536
  """
1372
- Specifies the Conda environment for the step.
1373
-
1374
- Information in this decorator will augment any
1375
- attributes set in the `@conda_base` flow-level decorator. Hence,
1376
- you can use `@conda_base` to set packages required by all
1377
- steps and use `@conda` to specify step-specific overrides.
1537
+ Specifies environment variables to be set prior to the execution of a step.
1378
1538
 
1379
1539
  Parameters
1380
1540
  ----------
1381
- packages : Dict[str, str], default {}
1382
- Packages to use for this step. The key is the name of the package
1383
- and the value is the version to use.
1384
- libraries : Dict[str, str], default {}
1385
- Supported for backward compatibility. When used with packages, packages will take precedence.
1386
- python : str, optional, default None
1387
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1388
- that the version used will correspond to the version of the Python interpreter used to start the run.
1389
- disabled : bool, default False
1390
- If set to True, disables @conda.
1541
+ vars : Dict[str, str], default {}
1542
+ Dictionary of environment variables to set.
1391
1543
  """
1392
1544
  ...
1393
1545
 
1394
1546
  @typing.overload
1395
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1547
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1396
1548
  """
1397
- Specifies secrets to be retrieved and injected as environment variables prior to
1398
- the execution of a step.
1549
+ Specifies the flow(s) that this flow depends on.
1550
+
1551
+ ```
1552
+ @trigger_on_finish(flow='FooFlow')
1553
+ ```
1554
+ or
1555
+ ```
1556
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1557
+ ```
1558
+ This decorator respects the @project decorator and triggers the flow
1559
+ when upstream runs within the same namespace complete successfully
1560
+
1561
+ Additionally, you can specify project aware upstream flow dependencies
1562
+ by specifying the fully qualified project_flow_name.
1563
+ ```
1564
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1565
+ ```
1566
+ or
1567
+ ```
1568
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1569
+ ```
1570
+
1571
+ You can also specify just the project or project branch (other values will be
1572
+ inferred from the current project or project branch):
1573
+ ```
1574
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1575
+ ```
1576
+
1577
+ Note that `branch` is typically one of:
1578
+ - `prod`
1579
+ - `user.bob`
1580
+ - `test.my_experiment`
1581
+ - `prod.staging`
1399
1582
 
1400
1583
  Parameters
1401
1584
  ----------
1402
- sources : List[Union[str, Dict[str, Any]]], default: []
1403
- List of secret specs, defining how the secrets are to be retrieved
1585
+ flow : Union[str, Dict[str, str]], optional, default None
1586
+ Upstream flow dependency for this flow.
1587
+ flows : List[Union[str, Dict[str, str]]], default []
1588
+ Upstream flow dependencies for this flow.
1589
+ options : Dict[str, Any], default {}
1590
+ Backend-specific configuration for tuning eventing behavior.
1591
+
1592
+
1404
1593
  """
1405
1594
  ...
1406
1595
 
1407
1596
  @typing.overload
1408
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1409
- ...
1410
-
1411
- @typing.overload
1412
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1597
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1413
1598
  ...
1414
1599
 
1415
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1600
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1416
1601
  """
1417
- Specifies secrets to be retrieved and injected as environment variables prior to
1418
- the execution of a step.
1602
+ Specifies the flow(s) that this flow depends on.
1419
1603
 
1420
- Parameters
1421
- ----------
1422
- sources : List[Union[str, Dict[str, Any]]], default: []
1423
- List of secret specs, defining how the secrets are to be retrieved
1424
- """
1425
- ...
1426
-
1427
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1428
- """
1429
- Specifies that this step should execute on Kubernetes.
1604
+ ```
1605
+ @trigger_on_finish(flow='FooFlow')
1606
+ ```
1607
+ or
1608
+ ```
1609
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1610
+ ```
1611
+ This decorator respects the @project decorator and triggers the flow
1612
+ when upstream runs within the same namespace complete successfully
1430
1613
 
1431
- Parameters
1432
- ----------
1433
- cpu : int, default 1
1434
- Number of CPUs required for this step. If `@resources` is
1435
- also present, the maximum value from all decorators is used.
1436
- memory : int, default 4096
1437
- Memory size (in MB) required for this step. If
1438
- `@resources` is also present, the maximum value from all decorators is
1439
- used.
1440
- disk : int, default 10240
1441
- Disk size (in MB) required for this step. If
1442
- `@resources` is also present, the maximum value from all decorators is
1443
- used.
1444
- image : str, optional, default None
1445
- Docker image to use when launching on Kubernetes. If not specified, and
1446
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1447
- not, a default Docker image mapping to the current version of Python is used.
1448
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1449
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1450
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1451
- Kubernetes service account to use when launching pod in Kubernetes.
1452
- secrets : List[str], optional, default None
1453
- Kubernetes secrets to use when launching pod in Kubernetes. These
1454
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1455
- in Metaflow configuration.
1456
- node_selector: Union[Dict[str,str], str], optional, default None
1457
- Kubernetes node selector(s) to apply to the pod running the task.
1458
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1459
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1460
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1461
- Kubernetes namespace to use when launching pod in Kubernetes.
1462
- gpu : int, optional, default None
1463
- Number of GPUs required for this step. A value of zero implies that
1464
- the scheduled node should not have GPUs.
1465
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1466
- The vendor of the GPUs to be used for this step.
1467
- tolerations : List[str], default []
1468
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1469
- Kubernetes tolerations to use when launching pod in Kubernetes.
1470
- use_tmpfs : bool, default False
1471
- This enables an explicit tmpfs mount for this step.
1472
- tmpfs_tempdir : bool, default True
1473
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1474
- tmpfs_size : int, optional, default: None
1475
- The value for the size (in MiB) of the tmpfs mount for this step.
1476
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1477
- memory allocated for this step.
1478
- tmpfs_path : str, optional, default /metaflow_temp
1479
- Path to tmpfs mount for this step.
1480
- persistent_volume_claims : Dict[str, str], optional, default None
1481
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1482
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1483
- shared_memory: int, optional
1484
- Shared memory size (in MiB) required for this step
1485
- port: int, optional
1486
- Port number to specify in the Kubernetes job object
1487
- compute_pool : str, optional, default None
1488
- Compute pool to be used for for this step.
1489
- If not specified, any accessible compute pool within the perimeter is used.
1490
- """
1491
- ...
1492
-
1493
- @typing.overload
1494
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1495
- """
1496
- Specifies the number of times the task corresponding
1497
- to a step needs to be retried.
1614
+ Additionally, you can specify project aware upstream flow dependencies
1615
+ by specifying the fully qualified project_flow_name.
1616
+ ```
1617
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1618
+ ```
1619
+ or
1620
+ ```
1621
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1622
+ ```
1498
1623
 
1499
- This decorator is useful for handling transient errors, such as networking issues.
1500
- If your task contains operations that can't be retried safely, e.g. database updates,
1501
- it is advisable to annotate it with `@retry(times=0)`.
1624
+ You can also specify just the project or project branch (other values will be
1625
+ inferred from the current project or project branch):
1626
+ ```
1627
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1628
+ ```
1502
1629
 
1503
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1504
- decorator will execute a no-op task after all retries have been exhausted,
1505
- ensuring that the flow execution can continue.
1630
+ Note that `branch` is typically one of:
1631
+ - `prod`
1632
+ - `user.bob`
1633
+ - `test.my_experiment`
1634
+ - `prod.staging`
1506
1635
 
1507
1636
  Parameters
1508
1637
  ----------
1509
- times : int, default 3
1510
- Number of times to retry this task.
1511
- minutes_between_retries : int, default 2
1512
- Number of minutes between retries.
1513
- """
1514
- ...
1515
-
1516
- @typing.overload
1517
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1518
- ...
1519
-
1520
- @typing.overload
1521
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1522
- ...
1523
-
1524
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1525
- """
1526
- Specifies the number of times the task corresponding
1527
- to a step needs to be retried.
1528
-
1529
- This decorator is useful for handling transient errors, such as networking issues.
1530
- If your task contains operations that can't be retried safely, e.g. database updates,
1531
- it is advisable to annotate it with `@retry(times=0)`.
1638
+ flow : Union[str, Dict[str, str]], optional, default None
1639
+ Upstream flow dependency for this flow.
1640
+ flows : List[Union[str, Dict[str, str]]], default []
1641
+ Upstream flow dependencies for this flow.
1642
+ options : Dict[str, Any], default {}
1643
+ Backend-specific configuration for tuning eventing behavior.
1532
1644
 
1533
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1534
- decorator will execute a no-op task after all retries have been exhausted,
1535
- ensuring that the flow execution can continue.
1536
1645
 
1537
- Parameters
1538
- ----------
1539
- times : int, default 3
1540
- Number of times to retry this task.
1541
- minutes_between_retries : int, default 2
1542
- Number of minutes between retries.
1543
1646
  """
1544
1647
  ...
1545
1648
 
1546
1649
  @typing.overload
1547
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1650
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1548
1651
  """
1549
- Specifies the Conda environment for all steps of the flow.
1550
-
1551
- Use `@conda_base` to set common libraries required by all
1552
- steps and use `@conda` to specify step-specific additions.
1652
+ Specifies the PyPI packages for all steps of the flow.
1553
1653
 
1654
+ Use `@pypi_base` to set common packages required by all
1655
+ steps and use `@pypi` to specify step-specific overrides.
1554
1656
  Parameters
1555
1657
  ----------
1556
- packages : Dict[str, str], default {}
1658
+ packages : Dict[str, str], default: {}
1557
1659
  Packages to use for this flow. The key is the name of the package
1558
1660
  and the value is the version to use.
1559
- libraries : Dict[str, str], default {}
1560
- Supported for backward compatibility. When used with packages, packages will take precedence.
1561
- python : str, optional, default None
1661
+ python : str, optional, default: None
1562
1662
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1563
1663
  that the version used will correspond to the version of the Python interpreter used to start the run.
1564
- disabled : bool, default False
1565
- If set to True, disables Conda.
1566
1664
  """
1567
1665
  ...
1568
1666
 
1569
1667
  @typing.overload
1570
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1668
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1571
1669
  ...
1572
1670
 
1573
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1671
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1574
1672
  """
1575
- Specifies the Conda environment for all steps of the flow.
1576
-
1577
- Use `@conda_base` to set common libraries required by all
1578
- steps and use `@conda` to specify step-specific additions.
1673
+ Specifies the PyPI packages for all steps of the flow.
1579
1674
 
1675
+ Use `@pypi_base` to set common packages required by all
1676
+ steps and use `@pypi` to specify step-specific overrides.
1580
1677
  Parameters
1581
1678
  ----------
1582
- packages : Dict[str, str], default {}
1679
+ packages : Dict[str, str], default: {}
1583
1680
  Packages to use for this flow. The key is the name of the package
1584
1681
  and the value is the version to use.
1585
- libraries : Dict[str, str], default {}
1586
- Supported for backward compatibility. When used with packages, packages will take precedence.
1587
- python : str, optional, default None
1682
+ python : str, optional, default: None
1588
1683
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1589
1684
  that the version used will correspond to the version of the Python interpreter used to start the run.
1590
- disabled : bool, default False
1591
- If set to True, disables Conda.
1592
1685
  """
1593
1686
  ...
1594
1687
 
@@ -1634,190 +1727,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1634
1727
  """
1635
1728
  ...
1636
1729
 
1637
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1638
- """
1639
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1640
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1641
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1642
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1643
- starts only after all sensors finish.
1644
-
1645
- Parameters
1646
- ----------
1647
- timeout : int
1648
- Time, in seconds before the task times out and fails. (Default: 3600)
1649
- poke_interval : int
1650
- Time in seconds that the job should wait in between each try. (Default: 60)
1651
- mode : str
1652
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1653
- exponential_backoff : bool
1654
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1655
- pool : str
1656
- the slot pool this task should run in,
1657
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1658
- soft_fail : bool
1659
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1660
- name : str
1661
- Name of the sensor on Airflow
1662
- description : str
1663
- Description of sensor in the Airflow UI
1664
- bucket_key : Union[str, List[str]]
1665
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1666
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1667
- bucket_name : str
1668
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1669
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1670
- wildcard_match : bool
1671
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1672
- aws_conn_id : str
1673
- a reference to the s3 connection on Airflow. (Default: None)
1674
- verify : bool
1675
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1676
- """
1677
- ...
1678
-
1679
- @typing.overload
1680
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1681
- """
1682
- Specifies the PyPI packages for all steps of the flow.
1683
-
1684
- Use `@pypi_base` to set common packages required by all
1685
- steps and use `@pypi` to specify step-specific overrides.
1686
- Parameters
1687
- ----------
1688
- packages : Dict[str, str], default: {}
1689
- Packages to use for this flow. The key is the name of the package
1690
- and the value is the version to use.
1691
- python : str, optional, default: None
1692
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1693
- that the version used will correspond to the version of the Python interpreter used to start the run.
1694
- """
1695
- ...
1696
-
1697
- @typing.overload
1698
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1699
- ...
1700
-
1701
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1702
- """
1703
- Specifies the PyPI packages for all steps of the flow.
1704
-
1705
- Use `@pypi_base` to set common packages required by all
1706
- steps and use `@pypi` to specify step-specific overrides.
1707
- Parameters
1708
- ----------
1709
- packages : Dict[str, str], default: {}
1710
- Packages to use for this flow. The key is the name of the package
1711
- and the value is the version to use.
1712
- python : str, optional, default: None
1713
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1714
- that the version used will correspond to the version of the Python interpreter used to start the run.
1715
- """
1716
- ...
1717
-
1718
- @typing.overload
1719
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1720
- """
1721
- Specifies the flow(s) that this flow depends on.
1722
-
1723
- ```
1724
- @trigger_on_finish(flow='FooFlow')
1725
- ```
1726
- or
1727
- ```
1728
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1729
- ```
1730
- This decorator respects the @project decorator and triggers the flow
1731
- when upstream runs within the same namespace complete successfully
1732
-
1733
- Additionally, you can specify project aware upstream flow dependencies
1734
- by specifying the fully qualified project_flow_name.
1735
- ```
1736
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1737
- ```
1738
- or
1739
- ```
1740
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1741
- ```
1742
-
1743
- You can also specify just the project or project branch (other values will be
1744
- inferred from the current project or project branch):
1745
- ```
1746
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1747
- ```
1748
-
1749
- Note that `branch` is typically one of:
1750
- - `prod`
1751
- - `user.bob`
1752
- - `test.my_experiment`
1753
- - `prod.staging`
1754
-
1755
- Parameters
1756
- ----------
1757
- flow : Union[str, Dict[str, str]], optional, default None
1758
- Upstream flow dependency for this flow.
1759
- flows : List[Union[str, Dict[str, str]]], default []
1760
- Upstream flow dependencies for this flow.
1761
- options : Dict[str, Any], default {}
1762
- Backend-specific configuration for tuning eventing behavior.
1763
-
1764
-
1765
- """
1766
- ...
1767
-
1768
- @typing.overload
1769
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1770
- ...
1771
-
1772
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1773
- """
1774
- Specifies the flow(s) that this flow depends on.
1775
-
1776
- ```
1777
- @trigger_on_finish(flow='FooFlow')
1778
- ```
1779
- or
1780
- ```
1781
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1782
- ```
1783
- This decorator respects the @project decorator and triggers the flow
1784
- when upstream runs within the same namespace complete successfully
1785
-
1786
- Additionally, you can specify project aware upstream flow dependencies
1787
- by specifying the fully qualified project_flow_name.
1788
- ```
1789
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1790
- ```
1791
- or
1792
- ```
1793
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1794
- ```
1795
-
1796
- You can also specify just the project or project branch (other values will be
1797
- inferred from the current project or project branch):
1798
- ```
1799
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1800
- ```
1801
-
1802
- Note that `branch` is typically one of:
1803
- - `prod`
1804
- - `user.bob`
1805
- - `test.my_experiment`
1806
- - `prod.staging`
1807
-
1808
- Parameters
1809
- ----------
1810
- flow : Union[str, Dict[str, str]], optional, default None
1811
- Upstream flow dependency for this flow.
1812
- flows : List[Union[str, Dict[str, str]]], default []
1813
- Upstream flow dependencies for this flow.
1814
- options : Dict[str, Any], default {}
1815
- Backend-specific configuration for tuning eventing behavior.
1816
-
1817
-
1818
- """
1819
- ...
1820
-
1821
1730
  @typing.overload
1822
1731
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1823
1732
  """
@@ -1867,24 +1776,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1867
1776
  """
1868
1777
  ...
1869
1778
 
1870
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1871
- """
1872
- Specifies what flows belong to the same project.
1873
-
1874
- A project-specific namespace is created for all flows that
1875
- use the same `@project(name)`.
1876
-
1877
- Parameters
1878
- ----------
1879
- name : str
1880
- Project name. Make sure that the name is unique amongst all
1881
- projects that use the same production scheduler. The name may
1882
- contain only lowercase alphanumeric characters and underscores.
1883
-
1884
-
1885
- """
1886
- ...
1887
-
1888
1779
  @typing.overload
1889
1780
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1890
1781
  """
@@ -1980,6 +1871,115 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1980
1871
  """
1981
1872
  ...
1982
1873
 
1874
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1875
+ """
1876
+ Specifies what flows belong to the same project.
1877
+
1878
+ A project-specific namespace is created for all flows that
1879
+ use the same `@project(name)`.
1880
+
1881
+ Parameters
1882
+ ----------
1883
+ name : str
1884
+ Project name. Make sure that the name is unique amongst all
1885
+ projects that use the same production scheduler. The name may
1886
+ contain only lowercase alphanumeric characters and underscores.
1887
+
1888
+
1889
+ """
1890
+ ...
1891
+
1892
+ @typing.overload
1893
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1894
+ """
1895
+ Specifies the Conda environment for all steps of the flow.
1896
+
1897
+ Use `@conda_base` to set common libraries required by all
1898
+ steps and use `@conda` to specify step-specific additions.
1899
+
1900
+ Parameters
1901
+ ----------
1902
+ packages : Dict[str, str], default {}
1903
+ Packages to use for this flow. The key is the name of the package
1904
+ and the value is the version to use.
1905
+ libraries : Dict[str, str], default {}
1906
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1907
+ python : str, optional, default None
1908
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1909
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1910
+ disabled : bool, default False
1911
+ If set to True, disables Conda.
1912
+ """
1913
+ ...
1914
+
1915
+ @typing.overload
1916
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1917
+ ...
1918
+
1919
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1920
+ """
1921
+ Specifies the Conda environment for all steps of the flow.
1922
+
1923
+ Use `@conda_base` to set common libraries required by all
1924
+ steps and use `@conda` to specify step-specific additions.
1925
+
1926
+ Parameters
1927
+ ----------
1928
+ packages : Dict[str, str], default {}
1929
+ Packages to use for this flow. The key is the name of the package
1930
+ and the value is the version to use.
1931
+ libraries : Dict[str, str], default {}
1932
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1933
+ python : str, optional, default None
1934
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1935
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1936
+ disabled : bool, default False
1937
+ If set to True, disables Conda.
1938
+ """
1939
+ ...
1940
+
1941
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1942
+ """
1943
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1944
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1945
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1946
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1947
+ starts only after all sensors finish.
1948
+
1949
+ Parameters
1950
+ ----------
1951
+ timeout : int
1952
+ Time, in seconds before the task times out and fails. (Default: 3600)
1953
+ poke_interval : int
1954
+ Time in seconds that the job should wait in between each try. (Default: 60)
1955
+ mode : str
1956
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1957
+ exponential_backoff : bool
1958
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1959
+ pool : str
1960
+ the slot pool this task should run in,
1961
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1962
+ soft_fail : bool
1963
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1964
+ name : str
1965
+ Name of the sensor on Airflow
1966
+ description : str
1967
+ Description of sensor in the Airflow UI
1968
+ bucket_key : Union[str, List[str]]
1969
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1970
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1971
+ bucket_name : str
1972
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1973
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1974
+ wildcard_match : bool
1975
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1976
+ aws_conn_id : str
1977
+ a reference to the s3 connection on Airflow. (Default: None)
1978
+ verify : bool
1979
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1980
+ """
1981
+ ...
1982
+
1983
1983
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1984
1984
  """
1985
1985
  Switch namespace to the one provided.