metaflow-stubs 2.12.19__py2.py3-none-any.whl → 2.12.21__py2.py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their respective public registries.
Files changed (152)
  1. metaflow-stubs/__init__.pyi +473 -473
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +16 -0
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +27 -25
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +4 -4
  21. metaflow-stubs/plugins/__init__.pyi +3 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +6 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -3
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +7 -7
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +18 -0
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +7 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +26 -4
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +8 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +5 -5
  117. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +8 -6
  137. metaflow-stubs/runner/metaflow_runner.pyi +8 -8
  138. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  139. metaflow-stubs/runner/nbrun.pyi +2 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +4 -4
  141. metaflow-stubs/runner/utils.pyi +5 -3
  142. metaflow-stubs/system/__init__.pyi +4 -4
  143. metaflow-stubs/system/system_logger.pyi +3 -3
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.19.dist-info → metaflow_stubs-2.12.21.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.21.dist-info/RECORD +152 -0
  150. {metaflow_stubs-2.12.19.dist-info → metaflow_stubs-2.12.21.dist-info}/WHEEL +1 -1
  151. metaflow_stubs-2.12.19.dist-info/RECORD +0 -150
  152. {metaflow_stubs-2.12.19.dist-info → metaflow_stubs-2.12.21.dist-info}/top_level.txt +0 -0
@@ -1,32 +1,28 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.19 #
4
- # Generated on 2024-09-04T23:07:02.410930 #
3
+ # MF version: 2.12.21 #
4
+ # Generated on 2024-09-19T17:04:54.824903 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
- import metaflow.client.core
13
- import metaflow.flowspec
14
- import metaflow.metaflow_current
15
- import metaflow.parameters
16
- import io
11
+ import metaflow.datastore.inputs
17
12
  import datetime
13
+ import metaflow.plugins.datatools.s3.s3
18
14
  import metaflow.runner.metaflow_runner
15
+ import metaflow.client.core
19
16
  import metaflow.events
20
- import metaflow.plugins.datatools.s3.s3
21
- import metaflow.datastore.inputs
17
+ import metaflow.metaflow_current
18
+ import metaflow.flowspec
19
+ import metaflow.parameters
22
20
  import metaflow._vendor.click.types
21
+ import typing
22
+ import io
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
26
- CURRENT_DIRECTORY: str
27
-
28
- INFO_FILE: str
29
-
30
26
  EXT_PKG: str
31
27
 
32
28
  def parallel_imap_unordered(func: typing.Callable[[typing.Any], typing.Any], iterable: typing.Iterable[typing.Any], max_parallel: typing.Optional[int] = None, dir: typing.Optional[str] = None) -> typing.Iterator[typing.Any]:
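For reference, a minimal usage sketch of the `parallel_imap_unordered` helper whose stub signature appears above; the worker function and the `max_parallel` value are illustrative and not taken from the package.
```
# Minimal sketch of parallel_imap_unordered as declared in the stub above.
from metaflow import parallel_imap_unordered

def square(x):
    return x * x

if __name__ == "__main__":
    # Results are yielded as workers complete, so ordering is not guaranteed.
    for result in parallel_imap_unordered(square, range(10), max_parallel=4):
        print(result)
```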
@@ -728,113 +724,35 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
728
724
  ...
729
725
 
730
726
  @typing.overload
731
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
727
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
728
  """
733
- Specifies that the step will succeed under all circumstances.
734
-
735
- The decorator will create an optional artifact, specified by `var`, which
736
- contains the exception raised. You can use it to detect the presence
737
- of errors, indicating that all happy-path artifacts produced by the step
738
- are missing.
729
+ Specifies secrets to be retrieved and injected as environment variables prior to
730
+ the execution of a step.
739
731
 
740
732
  Parameters
741
733
  ----------
742
- var : str, optional, default None
743
- Name of the artifact in which to store the caught exception.
744
- If not specified, the exception is not stored.
745
- print_exception : bool, default True
746
- Determines whether or not the exception is printed to
747
- stdout when caught.
734
+ sources : List[Union[str, Dict[str, Any]]], default: []
735
+ List of secret specs, defining how the secrets are to be retrieved
748
736
  """
749
737
  ...
750
738
 
751
739
  @typing.overload
752
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
740
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
753
741
  ...
754
742
 
755
743
  @typing.overload
756
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
757
- ...
758
-
759
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
760
- """
761
- Specifies that the step will succeed under all circumstances.
762
-
763
- The decorator will create an optional artifact, specified by `var`, which
764
- contains the exception raised. You can use it to detect the presence
765
- of errors, indicating that all happy-path artifacts produced by the step
766
- are missing.
767
-
768
- Parameters
769
- ----------
770
- var : str, optional, default None
771
- Name of the artifact in which to store the caught exception.
772
- If not specified, the exception is not stored.
773
- print_exception : bool, default True
774
- Determines whether or not the exception is printed to
775
- stdout when caught.
776
- """
744
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
777
745
  ...
778
746
 
779
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
747
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
780
748
  """
781
- Specifies that this step should execute on Kubernetes.
749
+ Specifies secrets to be retrieved and injected as environment variables prior to
750
+ the execution of a step.
782
751
 
783
752
  Parameters
784
753
  ----------
785
- cpu : int, default 1
786
- Number of CPUs required for this step. If `@resources` is
787
- also present, the maximum value from all decorators is used.
788
- memory : int, default 4096
789
- Memory size (in MB) required for this step. If
790
- `@resources` is also present, the maximum value from all decorators is
791
- used.
792
- disk : int, default 10240
793
- Disk size (in MB) required for this step. If
794
- `@resources` is also present, the maximum value from all decorators is
795
- used.
796
- image : str, optional, default None
797
- Docker image to use when launching on Kubernetes. If not specified, and
798
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
799
- not, a default Docker image mapping to the current version of Python is used.
800
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
801
- If given, the imagePullPolicy to be applied to the Docker image of the step.
802
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
803
- Kubernetes service account to use when launching pod in Kubernetes.
804
- secrets : List[str], optional, default None
805
- Kubernetes secrets to use when launching pod in Kubernetes. These
806
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
807
- in Metaflow configuration.
808
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
809
- Kubernetes namespace to use when launching pod in Kubernetes.
810
- gpu : int, optional, default None
811
- Number of GPUs required for this step. A value of zero implies that
812
- the scheduled node should not have GPUs.
813
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
814
- The vendor of the GPUs to be used for this step.
815
- tolerations : List[str], default []
816
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
817
- Kubernetes tolerations to use when launching pod in Kubernetes.
818
- use_tmpfs : bool, default False
819
- This enables an explicit tmpfs mount for this step.
820
- tmpfs_tempdir : bool, default True
821
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
822
- tmpfs_size : int, optional, default: None
823
- The value for the size (in MiB) of the tmpfs mount for this step.
824
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
825
- memory allocated for this step.
826
- tmpfs_path : str, optional, default /metaflow_temp
827
- Path to tmpfs mount for this step.
828
- persistent_volume_claims : Dict[str, str], optional, default None
829
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
830
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
831
- shared_memory: int, optional
832
- Shared memory size (in MiB) required for this step
833
- port: int, optional
834
- Port number to specify in the Kubernetes job object
835
- compute_pool : str, optional, default None
836
- Compute pool to be used for for this step.
837
- If not specified, any accessible compute pool within the perimeter is used.
754
+ sources : List[Union[str, Dict[str, Any]]], default: []
755
+ List of secret specs, defining how the secrets are to be retrieved
838
756
  """
839
757
  ...
840
758
 
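The hunk above moves the `@secrets` docstring to this position. Below is a minimal sketch of the decorator in use, assuming a configured secrets backend; the source name and the `MY_PASSWORD` variable are hypothetical placeholders.
```
# Hypothetical flow using the @secrets decorator documented above.
import os
from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["my-secret-source"])  # placeholder secret spec
    @step
    def start(self):
        # Resolved secrets are injected as environment variables
        # before the step body executes.
        print("MY_PASSWORD present:", "MY_PASSWORD" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()
```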
@@ -896,129 +814,82 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
896
814
  ...
897
815
 
898
816
  @typing.overload
899
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
817
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
900
818
  """
901
- Specifies the Conda environment for the step.
902
-
903
- Information in this decorator will augment any
904
- attributes set in the `@conda_base` flow-level decorator. Hence,
905
- you can use `@conda_base` to set packages required by all
906
- steps and use `@conda` to specify step-specific overrides.
819
+ Specifies environment variables to be set prior to the execution of a step.
907
820
 
908
821
  Parameters
909
822
  ----------
910
- packages : Dict[str, str], default {}
911
- Packages to use for this step. The key is the name of the package
912
- and the value is the version to use.
913
- libraries : Dict[str, str], default {}
914
- Supported for backward compatibility. When used with packages, packages will take precedence.
915
- python : str, optional, default None
916
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
917
- that the version used will correspond to the version of the Python interpreter used to start the run.
918
- disabled : bool, default False
919
- If set to True, disables @conda.
823
+ vars : Dict[str, str], default {}
824
+ Dictionary of environment variables to set.
920
825
  """
921
826
  ...
922
827
 
923
828
  @typing.overload
924
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
829
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
925
830
  ...
926
831
 
927
832
  @typing.overload
928
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
833
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
929
834
  ...
930
835
 
931
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
836
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
932
837
  """
933
- Specifies the Conda environment for the step.
934
-
935
- Information in this decorator will augment any
936
- attributes set in the `@conda_base` flow-level decorator. Hence,
937
- you can use `@conda_base` to set packages required by all
938
- steps and use `@conda` to specify step-specific overrides.
838
+ Specifies environment variables to be set prior to the execution of a step.
939
839
 
940
840
  Parameters
941
841
  ----------
942
- packages : Dict[str, str], default {}
943
- Packages to use for this step. The key is the name of the package
944
- and the value is the version to use.
945
- libraries : Dict[str, str], default {}
946
- Supported for backward compatibility. When used with packages, packages will take precedence.
947
- python : str, optional, default None
948
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
949
- that the version used will correspond to the version of the Python interpreter used to start the run.
950
- disabled : bool, default False
951
- If set to True, disables @conda.
842
+ vars : Dict[str, str], default {}
843
+ Dictionary of environment variables to set.
952
844
  """
953
845
  ...
954
846
 
955
847
  @typing.overload
956
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
848
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
957
849
  """
958
- Creates a human-readable report, a Metaflow Card, after this step completes.
850
+ Specifies that the step will succeed under all circumstances.
959
851
 
960
- Note that you may add multiple `@card` decorators in a step with different parameters.
852
+ The decorator will create an optional artifact, specified by `var`, which
853
+ contains the exception raised. You can use it to detect the presence
854
+ of errors, indicating that all happy-path artifacts produced by the step
855
+ are missing.
961
856
 
962
857
  Parameters
963
858
  ----------
964
- type : str, default 'default'
965
- Card type.
966
- id : str, optional, default None
967
- If multiple cards are present, use this id to identify this card.
968
- options : Dict[str, Any], default {}
969
- Options passed to the card. The contents depend on the card type.
970
- timeout : int, default 45
971
- Interrupt reporting if it takes more than this many seconds.
972
-
973
-
859
+ var : str, optional, default None
860
+ Name of the artifact in which to store the caught exception.
861
+ If not specified, the exception is not stored.
862
+ print_exception : bool, default True
863
+ Determines whether or not the exception is printed to
864
+ stdout when caught.
974
865
  """
975
866
  ...
976
867
 
977
868
  @typing.overload
978
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
869
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
979
870
  ...
980
871
 
981
872
  @typing.overload
982
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
873
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
983
874
  ...
984
875
 
985
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
876
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
986
877
  """
987
- Creates a human-readable report, a Metaflow Card, after this step completes.
878
+ Specifies that the step will succeed under all circumstances.
988
879
 
989
- Note that you may add multiple `@card` decorators in a step with different parameters.
880
+ The decorator will create an optional artifact, specified by `var`, which
881
+ contains the exception raised. You can use it to detect the presence
882
+ of errors, indicating that all happy-path artifacts produced by the step
883
+ are missing.
990
884
 
991
885
  Parameters
992
886
  ----------
993
- type : str, default 'default'
994
- Card type.
995
- id : str, optional, default None
996
- If multiple cards are present, use this id to identify this card.
997
- options : Dict[str, Any], default {}
998
- Options passed to the card. The contents depend on the card type.
999
- timeout : int, default 45
1000
- Interrupt reporting if it takes more than this many seconds.
1001
-
1002
-
1003
- """
1004
- ...
1005
-
1006
- @typing.overload
1007
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1008
- """
1009
- Decorator prototype for all step decorators. This function gets specialized
1010
- and imported for all decorators types by _import_plugin_decorators().
1011
- """
1012
- ...
1013
-
1014
- @typing.overload
1015
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1016
- ...
1017
-
1018
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1019
- """
1020
- Decorator prototype for all step decorators. This function gets specialized
1021
- and imported for all decorators types by _import_plugin_decorators().
887
+ var : str, optional, default None
888
+ Name of the artifact in which to store the caught exception.
889
+ If not specified, the exception is not stored.
890
+ print_exception : bool, default True
891
+ Determines whether or not the exception is printed to
892
+ stdout when caught.
1022
893
  """
1023
894
  ...
1024
895
 
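This hunk relocates the `@environment` and `@catch` docstrings. A short sketch showing both decorators on one step; the flow, variable, and artifact names are illustrative.
```
# Sketch combining @environment and @catch as documented above.
import os
from metaflow import FlowSpec, step, environment, catch

class RobustFlow(FlowSpec):

    @environment(vars={"GREETING": "hello"})
    @catch(var="start_error", print_exception=True)
    @step
    def start(self):
        print(os.environ["GREETING"])
        self.next(self.end)

    @step
    def end(self):
        # If start raised, the exception was stored in the optional artifact.
        print("caught:", getattr(self, "start_error", None))

if __name__ == "__main__":
    RobustFlow()
```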
@@ -1076,54 +947,23 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1076
947
  ...
1077
948
 
1078
949
  @typing.overload
1079
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
950
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1080
951
  """
1081
- Specifies environment variables to be set prior to the execution of a step.
1082
-
1083
- Parameters
1084
- ----------
1085
- vars : Dict[str, str], default {}
1086
- Dictionary of environment variables to set.
1087
- """
1088
- ...
1089
-
1090
- @typing.overload
1091
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1092
- ...
1093
-
1094
- @typing.overload
1095
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1096
- ...
1097
-
1098
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1099
- """
1100
- Specifies environment variables to be set prior to the execution of a step.
1101
-
1102
- Parameters
1103
- ----------
1104
- vars : Dict[str, str], default {}
1105
- Dictionary of environment variables to set.
1106
- """
1107
- ...
1108
-
1109
- @typing.overload
1110
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1111
- """
1112
- Specifies the resources needed when executing this step.
1113
-
1114
- Use `@resources` to specify the resource requirements
1115
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1116
-
1117
- You can choose the compute layer on the command line by executing e.g.
1118
- ```
1119
- python myflow.py run --with batch
1120
- ```
1121
- or
1122
- ```
1123
- python myflow.py run --with kubernetes
1124
- ```
1125
- which executes the flow on the desired system using the
1126
- requirements specified in `@resources`.
952
+ Specifies the resources needed when executing this step.
953
+
954
+ Use `@resources` to specify the resource requirements
955
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
956
+
957
+ You can choose the compute layer on the command line by executing e.g.
958
+ ```
959
+ python myflow.py run --with batch
960
+ ```
961
+ or
962
+ ```
963
+ python myflow.py run --with kubernetes
964
+ ```
965
+ which executes the flow on the desired system using the
966
+ requirements specified in `@resources`.
1127
967
 
1128
968
  Parameters
1129
969
  ----------
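The `@resources` docstring above (truncated at the hunk boundary) notes that the compute layer is chosen on the command line with `--with`. A minimal sketch with illustrative resource values:
```
# Minimal sketch of @resources as documented above; values are illustrative.
from metaflow import FlowSpec, step, resources

class TrainFlow(FlowSpec):

    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        # Run with e.g. `python trainflow.py run --with kubernetes` to apply
        # these requirements on the chosen compute layer.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```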
@@ -1183,6 +1023,25 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1183
1023
  """
1184
1024
  ...
1185
1025
 
1026
+ @typing.overload
1027
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1028
+ """
1029
+ Decorator prototype for all step decorators. This function gets specialized
1030
+ and imported for all decorators types by _import_plugin_decorators().
1031
+ """
1032
+ ...
1033
+
1034
+ @typing.overload
1035
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1036
+ ...
1037
+
1038
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1039
+ """
1040
+ Decorator prototype for all step decorators. This function gets specialized
1041
+ and imported for all decorators types by _import_plugin_decorators().
1042
+ """
1043
+ ...
1044
+
1186
1045
  @typing.overload
1187
1046
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1188
1047
  """
@@ -1331,35 +1190,53 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1331
1190
  ...
1332
1191
 
1333
1192
  @typing.overload
1334
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1193
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1335
1194
  """
1336
- Specifies secrets to be retrieved and injected as environment variables prior to
1337
- the execution of a step.
1195
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1196
+
1197
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1338
1198
 
1339
1199
  Parameters
1340
1200
  ----------
1341
- sources : List[Union[str, Dict[str, Any]]], default: []
1342
- List of secret specs, defining how the secrets are to be retrieved
1201
+ type : str, default 'default'
1202
+ Card type.
1203
+ id : str, optional, default None
1204
+ If multiple cards are present, use this id to identify this card.
1205
+ options : Dict[str, Any], default {}
1206
+ Options passed to the card. The contents depend on the card type.
1207
+ timeout : int, default 45
1208
+ Interrupt reporting if it takes more than this many seconds.
1209
+
1210
+
1343
1211
  """
1344
1212
  ...
1345
1213
 
1346
1214
  @typing.overload
1347
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1215
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1348
1216
  ...
1349
1217
 
1350
1218
  @typing.overload
1351
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1219
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1352
1220
  ...
1353
1221
 
1354
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1222
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1355
1223
  """
1356
- Specifies secrets to be retrieved and injected as environment variables prior to
1357
- the execution of a step.
1224
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1225
+
1226
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1358
1227
 
1359
1228
  Parameters
1360
1229
  ----------
1361
- sources : List[Union[str, Dict[str, Any]]], default: []
1362
- List of secret specs, defining how the secrets are to be retrieved
1230
+ type : str, default 'default'
1231
+ Card type.
1232
+ id : str, optional, default None
1233
+ If multiple cards are present, use this id to identify this card.
1234
+ options : Dict[str, Any], default {}
1235
+ Options passed to the card. The contents depend on the card type.
1236
+ timeout : int, default 45
1237
+ Interrupt reporting if it takes more than this many seconds.
1238
+
1239
+
1363
1240
  """
1364
1241
  ...
1365
1242
 
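This hunk moves the `@card` docstring here. A sketch of a step producing a card; the card id and the Markdown content are illustrative.
```
# Sketch of the @card decorator documented above.
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        # Components appended here are rendered into the card after the step.
        current.card["summary"].append(Markdown("# Run summary"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardFlow()
```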
@@ -1412,6 +1289,129 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1412
1289
  """
1413
1290
  ...
1414
1291
 
1292
+ @typing.overload
1293
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1294
+ """
1295
+ Specifies the Conda environment for the step.
1296
+
1297
+ Information in this decorator will augment any
1298
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1299
+ you can use `@conda_base` to set packages required by all
1300
+ steps and use `@conda` to specify step-specific overrides.
1301
+
1302
+ Parameters
1303
+ ----------
1304
+ packages : Dict[str, str], default {}
1305
+ Packages to use for this step. The key is the name of the package
1306
+ and the value is the version to use.
1307
+ libraries : Dict[str, str], default {}
1308
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1309
+ python : str, optional, default None
1310
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1311
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1312
+ disabled : bool, default False
1313
+ If set to True, disables @conda.
1314
+ """
1315
+ ...
1316
+
1317
+ @typing.overload
1318
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1319
+ ...
1320
+
1321
+ @typing.overload
1322
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1323
+ ...
1324
+
1325
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1326
+ """
1327
+ Specifies the Conda environment for the step.
1328
+
1329
+ Information in this decorator will augment any
1330
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1331
+ you can use `@conda_base` to set packages required by all
1332
+ steps and use `@conda` to specify step-specific overrides.
1333
+
1334
+ Parameters
1335
+ ----------
1336
+ packages : Dict[str, str], default {}
1337
+ Packages to use for this step. The key is the name of the package
1338
+ and the value is the version to use.
1339
+ libraries : Dict[str, str], default {}
1340
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1341
+ python : str, optional, default None
1342
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1343
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1344
+ disabled : bool, default False
1345
+ If set to True, disables @conda.
1346
+ """
1347
+ ...
1348
+
1349
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1350
+ """
1351
+ Specifies that this step should execute on Kubernetes.
1352
+
1353
+ Parameters
1354
+ ----------
1355
+ cpu : int, default 1
1356
+ Number of CPUs required for this step. If `@resources` is
1357
+ also present, the maximum value from all decorators is used.
1358
+ memory : int, default 4096
1359
+ Memory size (in MB) required for this step. If
1360
+ `@resources` is also present, the maximum value from all decorators is
1361
+ used.
1362
+ disk : int, default 10240
1363
+ Disk size (in MB) required for this step. If
1364
+ `@resources` is also present, the maximum value from all decorators is
1365
+ used.
1366
+ image : str, optional, default None
1367
+ Docker image to use when launching on Kubernetes. If not specified, and
1368
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1369
+ not, a default Docker image mapping to the current version of Python is used.
1370
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1371
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1372
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1373
+ Kubernetes service account to use when launching pod in Kubernetes.
1374
+ secrets : List[str], optional, default None
1375
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1376
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1377
+ in Metaflow configuration.
1378
+ node_selector: Union[Dict[str,str], str], optional, default None
1379
+ Kubernetes node selector(s) to apply to the pod running the task.
1380
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
1381
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
1382
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1383
+ Kubernetes namespace to use when launching pod in Kubernetes.
1384
+ gpu : int, optional, default None
1385
+ Number of GPUs required for this step. A value of zero implies that
1386
+ the scheduled node should not have GPUs.
1387
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1388
+ The vendor of the GPUs to be used for this step.
1389
+ tolerations : List[str], default []
1390
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1391
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1392
+ use_tmpfs : bool, default False
1393
+ This enables an explicit tmpfs mount for this step.
1394
+ tmpfs_tempdir : bool, default True
1395
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1396
+ tmpfs_size : int, optional, default: None
1397
+ The value for the size (in MiB) of the tmpfs mount for this step.
1398
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1399
+ memory allocated for this step.
1400
+ tmpfs_path : str, optional, default /metaflow_temp
1401
+ Path to tmpfs mount for this step.
1402
+ persistent_volume_claims : Dict[str, str], optional, default None
1403
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1404
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1405
+ shared_memory: int, optional
1406
+ Shared memory size (in MiB) required for this step
1407
+ port: int, optional
1408
+ Port number to specify in the Kubernetes job object
1409
+ compute_pool : str, optional, default None
1410
+ Compute pool to be used for this step.
1411
+ If not specified, any accessible compute pool within the perimeter is used.
1412
+ """
1413
+ ...
1414
+
1415
1415
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1416
1416
  """
1417
1417
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
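The `@kubernetes` signature added in the hunk above gains a `node_selector` parameter, accepted either as a dictionary or as a comma-separated string. A sketch assuming a configured Kubernetes deployment of Metaflow; the selector values and resource sizes are illustrative.
```
# Sketch of @kubernetes using the node_selector parameter documented above.
from metaflow import FlowSpec, step, kubernetes

class K8sFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192,
                node_selector={"kubernetes.io/os": "linux",
                               "kubernetes.io/arch": "amd64"})
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sFlow()
```
The same selector could be written as the string `"kubernetes.io/os=linux,kubernetes.io/arch=amd64"`, per the docstring.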
@@ -1455,83 +1455,48 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1455
1455
  ...
1456
1456
 
1457
1457
  @typing.overload
1458
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1458
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1459
1459
  """
1460
- Specifies the PyPI packages for all steps of the flow.
1460
+ Specifies the flow(s) that this flow depends on.
1461
1461
 
1462
- Use `@pypi_base` to set common packages required by all
1463
- steps and use `@pypi` to specify step-specific overrides.
1464
- Parameters
1465
- ----------
1466
- packages : Dict[str, str], default: {}
1467
- Packages to use for this flow. The key is the name of the package
1468
- and the value is the version to use.
1469
- python : str, optional, default: None
1470
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1471
- that the version used will correspond to the version of the Python interpreter used to start the run.
1472
- """
1473
- ...
1474
-
1475
- @typing.overload
1476
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1477
- ...
1478
-
1479
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1480
- """
1481
- Specifies the PyPI packages for all steps of the flow.
1482
-
1483
- Use `@pypi_base` to set common packages required by all
1484
- steps and use `@pypi` to specify step-specific overrides.
1485
- Parameters
1486
- ----------
1487
- packages : Dict[str, str], default: {}
1488
- Packages to use for this flow. The key is the name of the package
1489
- and the value is the version to use.
1490
- python : str, optional, default: None
1491
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1492
- that the version used will correspond to the version of the Python interpreter used to start the run.
1493
- """
1494
- ...
1495
-
1496
- @typing.overload
1497
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1498
- """
1499
- Specifies the event(s) that this flow depends on.
1462
+ ```
1463
+ @trigger_on_finish(flow='FooFlow')
1464
+ ```
1465
+ or
1466
+ ```
1467
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1468
+ ```
1469
+ This decorator respects the @project decorator and triggers the flow
1470
+ when upstream runs within the same namespace complete successfully
1500
1471
 
1472
+ Additionally, you can specify project aware upstream flow dependencies
1473
+ by specifying the fully qualified project_flow_name.
1501
1474
  ```
1502
- @trigger(event='foo')
1475
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1503
1476
  ```
1504
1477
  or
1505
1478
  ```
1506
- @trigger(events=['foo', 'bar'])
1479
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1507
1480
  ```
1508
1481
 
1509
- Additionally, you can specify the parameter mappings
1510
- to map event payload to Metaflow parameters for the flow.
1511
- ```
1512
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1513
- ```
1514
- or
1482
+ You can also specify just the project or project branch (other values will be
1483
+ inferred from the current project or project branch):
1515
1484
  ```
1516
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1517
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1485
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1518
1486
  ```
1519
1487
 
1520
- 'parameters' can also be a list of strings and tuples like so:
1521
- ```
1522
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1523
- ```
1524
- This is equivalent to:
1525
- ```
1526
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1527
- ```
1488
+ Note that `branch` is typically one of:
1489
+ - `prod`
1490
+ - `user.bob`
1491
+ - `test.my_experiment`
1492
+ - `prod.staging`
1528
1493
 
1529
1494
  Parameters
1530
1495
  ----------
1531
- event : Union[str, Dict[str, Any]], optional, default None
1532
- Event dependency for this flow.
1533
- events : List[Union[str, Dict[str, Any]]], default []
1534
- Events dependency for this flow.
1496
+ flow : Union[str, Dict[str, str]], optional, default None
1497
+ Upstream flow dependency for this flow.
1498
+ flows : List[Union[str, Dict[str, str]]], default []
1499
+ Upstream flow dependencies for this flow.
1535
1500
  options : Dict[str, Any], default {}
1536
1501
  Backend-specific configuration for tuning eventing behavior.
1537
1502
 
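A sketch of the `@trigger_on_finish` patterns described above, reusing the docstring's own `FooFlow` placeholder; the trigger only fires once the flow is deployed to a production orchestrator such as Argo Workflows.
```
# Sketch of @trigger_on_finish as documented above; FooFlow is a placeholder.
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Runs automatically after a successful FooFlow run (when deployed).
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```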
@@ -1540,47 +1505,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
1540
1505
  ...
1541
1506
 
1542
1507
  @typing.overload
1543
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1508
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1544
1509
  ...
1545
1510
 
1546
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1511
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1547
1512
  """
1548
- Specifies the event(s) that this flow depends on.
1513
+ Specifies the flow(s) that this flow depends on.
1549
1514
 
1550
1515
  ```
1551
- @trigger(event='foo')
1516
+ @trigger_on_finish(flow='FooFlow')
1552
1517
  ```
1553
1518
  or
1554
1519
  ```
1555
- @trigger(events=['foo', 'bar'])
1520
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1556
1521
  ```
1522
+ This decorator respects the @project decorator and triggers the flow
1523
+ when upstream runs within the same namespace complete successfully
1557
1524
 
1558
- Additionally, you can specify the parameter mappings
1559
- to map event payload to Metaflow parameters for the flow.
1525
+ Additionally, you can specify project aware upstream flow dependencies
1526
+ by specifying the fully qualified project_flow_name.
1560
1527
  ```
1561
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1528
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1562
1529
  ```
1563
1530
  or
1564
1531
  ```
1565
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1566
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1532
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1567
1533
  ```
1568
1534
 
1569
- 'parameters' can also be a list of strings and tuples like so:
1570
- ```
1571
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1572
- ```
1573
- This is equivalent to:
1535
+ You can also specify just the project or project branch (other values will be
1536
+ inferred from the current project or project branch):
1574
1537
  ```
1575
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1538
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1576
1539
  ```
1577
1540
 
1541
+ Note that `branch` is typically one of:
1542
+ - `prod`
1543
+ - `user.bob`
1544
+ - `test.my_experiment`
1545
+ - `prod.staging`
1546
+
1578
1547
  Parameters
1579
1548
  ----------
1580
- event : Union[str, Dict[str, Any]], optional, default None
1581
- Event dependency for this flow.
1582
- events : List[Union[str, Dict[str, Any]]], default []
1583
- Events dependency for this flow.
1549
+ flow : Union[str, Dict[str, str]], optional, default None
1550
+ Upstream flow dependency for this flow.
1551
+ flows : List[Union[str, Dict[str, str]]], default []
1552
+ Upstream flow dependencies for this flow.
1584
1553
  options : Dict[str, Any], default {}
1585
1554
  Backend-specific configuration for tuning eventing behavior.
1586
1555
 
@@ -1607,48 +1576,181 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
  ...
 
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
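
A minimal sketch of the `@schedule` flow-level decorator added above; the flow name is hypothetical:

```python
# Minimal sketch: when deployed to a production scheduler, run the flow once a day
# (daily=True mirrors the documented default; cron/timezone could be used instead).
from metaflow import FlowSpec, schedule, step

@schedule(daily=True)
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```
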
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
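
A minimal sketch of the `@pypi_base` decorator added above; the package pins and flow name are illustrative:

```python
# Minimal sketch: pin flow-wide PyPI dependencies (versions here are illustrative);
# per-step overrides would use @pypi on individual steps.
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={'pandas': '2.2.2'}, python='3.11.5')
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.rows)

if __name__ == '__main__':
    PandasFlow()
```
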
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
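
A minimal sketch of the `@conda_base` decorator added above; package versions and the flow name are illustrative:

```python
# Minimal sketch: declare a flow-wide Conda environment (versions are illustrative);
# step-specific additions would use @conda on individual steps.
from metaflow import FlowSpec, conda_base, step

@conda_base(packages={'numpy': '1.26.4'}, python='3.10.12')
class NumpyFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the @conda_base environment
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print('total:', self.total)

if __name__ == '__main__':
    NumpyFlow()
```
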
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1657,51 +1759,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
 
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
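
For orientation, a minimal sketch of the `@trigger` decorator documented above, including the documented parameter mapping; the event name, payload field, and flow name are hypothetical:

```python
# Minimal sketch: start the flow from an external event named 'data_updated' and
# map the event payload field 'table_name' onto the flow Parameter 'table'
# (all names here are hypothetical).
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={'name': 'data_updated', 'parameters': {'table': 'table_name'}})
class EventDrivenFlow(FlowSpec):

    table = Parameter('table', default='raw_events')

    @step
    def start(self):
        print('triggered for table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```
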
@@ -1751,104 +1849,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
 
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.