metaflow-stubs 2.12.4__py2.py3-none-any.whl → 2.12.5__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. metaflow-stubs/__init__.pyi +421 -421
  2. metaflow-stubs/cards.pyi +11 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +17 -17
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +9 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +9 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +4 -4
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +3 -3
  110. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  111. metaflow-stubs/plugins/package_cli.pyi +2 -2
  112. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +2 -2
  132. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  133. metaflow-stubs/runner/nbrun.pyi +2 -2
  134. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +2 -2
  137. metaflow-stubs/version.pyi +2 -2
  138. {metaflow_stubs-2.12.4.dist-info → metaflow_stubs-2.12.5.dist-info}/METADATA +2 -2
  139. metaflow_stubs-2.12.5.dist-info/RECORD +142 -0
  140. {metaflow_stubs-2.12.4.dist-info → metaflow_stubs-2.12.5.dist-info}/WHEEL +1 -1
  141. metaflow_stubs-2.12.4.dist-info/RECORD +0 -142
  142. {metaflow_stubs-2.12.4.dist-info → metaflow_stubs-2.12.5.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.4 #
- # Generated on 2024-06-18T20:28:20.449033 #
+ # MF version: 2.12.5 #
+ # Generated on 2024-06-20T19:51:33.982232 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow._vendor.click.types
- import datetime
- import metaflow.events
- import metaflow.flowspec
- import io
  import metaflow.client.core
- import metaflow.metaflow_current
  import metaflow.runner.metaflow_runner
+ import metaflow._vendor.click.types
  import typing
+ import metaflow.parameters
+ import metaflow.flowspec
  import metaflow.plugins.datatools.s3.s3
+ import datetime
+ import io
+ import metaflow.metaflow_current
  import metaflow.datastore.inputs
- import metaflow.parameters
+ import metaflow.events
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -727,6 +727,39 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
  @typing.overload
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -874,124 +907,201 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies that this step should execute on Kubernetes.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the Conda environment for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

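For orientation, a sketch that combines the step decorators whose stubs are reshuffled in this hunk (`@kubernetes`, `@conda`, `@card`, `@environment`); the resource sizes, package pins and variable names are illustrative only:

```
from metaflow import FlowSpec, card, conda, environment, kubernetes, step


class TrainDemoFlow(FlowSpec):

    @environment(vars={"OMP_NUM_THREADS": "2"})            # illustrative env var
    @conda(packages={"pandas": "2.2.2"}, python="3.11.5")  # illustrative pins
    @kubernetes(cpu=2, memory=8192, disk=20480)            # illustrative sizing
    @card(type="default", timeout=45)
    @step
    def start(self):
        import pandas as pd

        # The artifact below shows up in the default card for this task.
        self.table = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainDemoFlow()
```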
@@ -1049,59 +1159,51 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the Conda environment for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

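A minimal sketch of the `@catch` behaviour described above, combined with `@retry`; the artifact name `failure` and the simulated error are assumptions for illustration:

```
from metaflow import FlowSpec, catch, retry, step


class CatchDemoFlow(FlowSpec):

    @catch(var="failure")   # store the final exception in self.failure
    @retry(times=2)         # retry transient errors before @catch gives up
    @step
    def start(self):
        # Simulated failure: with @catch the run continues and the
        # exception is recorded instead of failing the task.
        raise ValueError("simulated error")
        self.next(self.end)

    @step
    def end(self):
        print("caught:", getattr(self, "failure", None))


if __name__ == "__main__":
    CatchDemoFlow()
```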
@@ -1183,53 +1285,59 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.

- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.

- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

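A small sketch of `@timeout` as documented above; the units add up, so the limit below works out to 1 minute 30 seconds (the sleep is a stand-in for real work):

```
import time

from metaflow import FlowSpec, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @timeout(minutes=1, seconds=30)  # effective limit: 90 seconds
    @step
    def start(self):
        time.sleep(5)  # placeholder for work that could hang
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```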
@@ -1283,231 +1391,51 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

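The flow-level `@schedule` stub added in this hunk is used roughly as follows when the flow is deployed to a production scheduler; the daily cadence below is just one option (hourly, weekly, a cron expression and, on Argo Workflows, an IANA timezone are the others):

```
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)  # could also be hourly=True, weekly=True or cron="..."
class NightlyDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyDemoFlow()
```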
@@ -1606,52 +1534,21 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies what flows belong to the same project.

- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...

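And a corresponding sketch for the flow-level `@project` stub; the project name `demo_project` is a placeholder:

```
from metaflow import FlowSpec, project, step


@project(name="demo_project")  # flows deployed under this name share a namespace
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```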
@@ -1704,10 +1601,13 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.

  Parameters
  ----------
@@ -1728,31 +1628,25 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
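A sketch of how the `@airflow_s3_key_sensor` parameters above might be filled in, assuming the top-level import exposed by this stub file; every value (bucket, key, sensor name, timings) is hypothetical, and the flow only makes sense when compiled with `airflow create`:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Illustrative values only. The sensor gates the `start` step until the key exists.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_daily_dump",
    description="Wait for the daily S3 dump before starting",
    bucket_key="s3://example-bucket/daily/dump.parquet",  # full s3:// URL, so bucket_name stays None
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3TriggeredFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3TriggeredFlow()
```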
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

  Parameters
  ----------
@@ -1773,18 +1667,21 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check that the external task exists, or that
+ the DAG to wait for exists. (Default: True)
  """
  ...
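A corresponding sketch for `@airflow_external_task_sensor`, again assuming the top-level import from this stub file; the DAG id, task ids, sensor name, and timings are hypothetical:

```
from datetime import timedelta

from metaflow import FlowSpec, airflow_external_task_sensor, step

# Illustrative values only. Waits for a task in another Airflow DAG before
# this flow's `start` step is allowed to run.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_upstream_etl",
    description="Wait for the upstream ETL DAG to finish",
    external_dag_id="upstream_etl",
    external_task_ids=["load_table"],
    allowed_states=["success"],
    failed_states=["failed"],
    execution_delta=timedelta(hours=1),
    check_existence=True,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```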
 
@@ -1827,6 +1724,109 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...

+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
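A minimal sketch of the `@trigger_on_finish` decorator added above, reusing the `FooFlow`/`BarFlow` names from its docstring; when deployed to a production orchestrator, the decorated flow runs whenever the upstream flow in the same project/branch finishes successfully:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Illustrative: BarFlow runs after each successful FooFlow run in the same namespace.
@trigger_on_finish(flow="FooFlow")
class BarFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```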
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.