metaflow-stubs 2.11.5__py2.py3-none-any.whl → 2.11.7__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. The information is provided for informational purposes only.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +474 -462
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +5 -5
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +4 -4
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +3 -3
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +4 -4
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  83. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.7.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.7.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.5.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.7.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.5.dist-info → metaflow_stubs-2.11.7.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.5 #
- # Generated on 2024-03-14T18:36:24.873925 #
+ # MF version: 2.11.7 #
+ # Generated on 2024-03-27T23:22:57.990224 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
+ import metaflow.datastore.inputs
  import metaflow.metaflow_current
- import metaflow._vendor.click.types
- import datetime
- import metaflow.events
  import metaflow.parameters
+ import typing
  import metaflow.client.core
- import metaflow.plugins.datatools.s3.s3
- import metaflow.datastore.inputs
  import io
- import typing
+ import metaflow.events
+ import datetime
+ import metaflow.plugins.datatools.s3.s3
+ import metaflow._vendor.click.types
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -833,202 +833,6 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1081,7 +885,7 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

@@ -1141,6 +945,12 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  ephemeral_storage: int, default None
  The total amount, in GiB, of ephemeral storage to set for the task (21-200)
  This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...

@@ -1152,7 +962,7 @@ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Calla
  def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

@@ -1212,6 +1022,12 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ephemeral_storage: int, default None
  The total amount, in GiB, of ephemeral storage to set for the task (21-200)
  This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example usage: ["awslogs-group:aws/batch/job"]
  """
  ...

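The substantive API addition in this stub update is the `log_driver` / `log_options` pair on `@batch`, shown in the two hunks above. A minimal, hypothetical sketch of a step opting into a custom ECS log configuration (flow name and option values are illustrative; the option string follows the "key:value" form from the docstring's own example):

from metaflow import FlowSpec, batch, step

class BatchLogsFlow(FlowSpec):

    @batch(
        cpu=1,
        memory=4096,
        log_driver="awslogs",                         # ECS log driver for the Batch container
        log_options=["awslogs-group:aws/batch/job"],  # passed through to the driver, not validated by Metaflow
    )
    @step
    def start(self):
        print("running on AWS Batch with a custom log configuration")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchLogsFlow()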
@@ -1264,50 +1080,107 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1264
1080
  """
1265
1081
  ...
1266
1082
 
1267
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1083
+ @typing.overload
1084
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1268
1085
  """
1269
- Specifies that this step should execute on Kubernetes.
1086
+ Specifies the Conda environment for the step.
1087
+
1088
+ Information in this decorator will augment any
1089
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1090
+ you can use `@conda_base` to set packages required by all
1091
+ steps and use `@conda` to specify step-specific overrides.
1270
1092
 
1271
1093
  Parameters
1272
1094
  ----------
1273
- cpu : int, default 1
1274
- Number of CPUs required for this step. If `@resources` is
1275
- also present, the maximum value from all decorators is used.
1276
- memory : int, default 4096
1277
- Memory size (in MB) required for this step. If
1278
- `@resources` is also present, the maximum value from all decorators is
1279
- used.
1280
- disk : int, default 10240
1281
- Disk size (in MB) required for this step. If
1282
- `@resources` is also present, the maximum value from all decorators is
1283
- used.
1284
- image : str, optional, default None
1285
- Docker image to use when launching on Kubernetes. If not specified, and
1286
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1287
- not, a default Docker image mapping to the current version of Python is used.
1288
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1289
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1290
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1291
- Kubernetes service account to use when launching pod in Kubernetes.
1292
- secrets : List[str], optional, default None
1293
- Kubernetes secrets to use when launching pod in Kubernetes. These
1294
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1295
- in Metaflow configuration.
1296
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1297
- Kubernetes namespace to use when launching pod in Kubernetes.
1298
- gpu : int, optional, default None
1299
- Number of GPUs required for this step. A value of zero implies that
1300
- the scheduled node should not have GPUs.
1301
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1302
- The vendor of the GPUs to be used for this step.
1303
- tolerations : List[str], default []
1304
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1305
- Kubernetes tolerations to use when launching pod in Kubernetes.
1306
- use_tmpfs : bool, default False
1307
- This enables an explicit tmpfs mount for this step.
1308
- tmpfs_tempdir : bool, default True
1309
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1310
- tmpfs_size : int, optional, default: None
1095
+ packages : Dict[str, str], default {}
1096
+ Packages to use for this step. The key is the name of the package
1097
+ and the value is the version to use.
1098
+ libraries : Dict[str, str], default {}
1099
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1100
+ python : str, optional, default None
1101
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1102
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1103
+ disabled : bool, default False
1104
+ If set to True, disables @conda.
1105
+ """
1106
+ ...
1107
+
1108
+ @typing.overload
1109
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1110
+ ...
1111
+
1112
+ @typing.overload
1113
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1114
+ ...
1115
+
1116
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1117
+ """
1118
+ Specifies the Conda environment for the step.
1119
+
1120
+ Information in this decorator will augment any
1121
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1122
+ you can use `@conda_base` to set packages required by all
1123
+ steps and use `@conda` to specify step-specific overrides.
1124
+
1125
+ Parameters
1126
+ ----------
1127
+ packages : Dict[str, str], default {}
1128
+ Packages to use for this step. The key is the name of the package
1129
+ and the value is the version to use.
1130
+ libraries : Dict[str, str], default {}
1131
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1132
+ python : str, optional, default None
1133
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1134
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1135
+ disabled : bool, default False
1136
+ If set to True, disables @conda.
1137
+ """
1138
+ ...
1139
+
1140
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1141
+ """
1142
+ Specifies that this step should execute on Kubernetes.
1143
+
1144
+ Parameters
1145
+ ----------
1146
+ cpu : int, default 1
1147
+ Number of CPUs required for this step. If `@resources` is
1148
+ also present, the maximum value from all decorators is used.
1149
+ memory : int, default 4096
1150
+ Memory size (in MB) required for this step. If
1151
+ `@resources` is also present, the maximum value from all decorators is
1152
+ used.
1153
+ disk : int, default 10240
1154
+ Disk size (in MB) required for this step. If
1155
+ `@resources` is also present, the maximum value from all decorators is
1156
+ used.
1157
+ image : str, optional, default None
1158
+ Docker image to use when launching on Kubernetes. If not specified, and
1159
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1160
+ not, a default Docker image mapping to the current version of Python is used.
1161
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1162
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1163
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1164
+ Kubernetes service account to use when launching pod in Kubernetes.
1165
+ secrets : List[str], optional, default None
1166
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1167
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1168
+ in Metaflow configuration.
1169
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1170
+ Kubernetes namespace to use when launching pod in Kubernetes.
1171
+ gpu : int, optional, default None
1172
+ Number of GPUs required for this step. A value of zero implies that
1173
+ the scheduled node should not have GPUs.
1174
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1175
+ The vendor of the GPUs to be used for this step.
1176
+ tolerations : List[str], default []
1177
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1178
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1179
+ use_tmpfs : bool, default False
1180
+ This enables an explicit tmpfs mount for this step.
1181
+ tmpfs_tempdir : bool, default True
1182
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1183
+ tmpfs_size : int, optional, default: None
1311
1184
  The value for the size (in MiB) of the tmpfs mount for this step.
1312
1185
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1313
1186
  memory allocated for this step.
@@ -1322,308 +1195,243 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1322
1195
  ...
1323
1196
 
1324
1197
  @typing.overload
1325
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1198
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1326
1199
  """
1327
- Specifies the number of times the task corresponding
1328
- to a step needs to be retried.
1329
-
1330
- This decorator is useful for handling transient errors, such as networking issues.
1331
- If your task contains operations that can't be retried safely, e.g. database updates,
1332
- it is advisable to annotate it with `@retry(times=0)`.
1333
-
1334
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1335
- decorator will execute a no-op task after all retries have been exhausted,
1336
- ensuring that the flow execution can continue.
1200
+ Specifies secrets to be retrieved and injected as environment variables prior to
1201
+ the execution of a step.
1337
1202
 
1338
1203
  Parameters
1339
1204
  ----------
1340
- times : int, default 3
1341
- Number of times to retry this task.
1342
- minutes_between_retries : int, default 2
1343
- Number of minutes between retries.
1205
+ sources : List[Union[str, Dict[str, Any]]], default: []
1206
+ List of secret specs, defining how the secrets are to be retrieved
1344
1207
  """
1345
1208
  ...
1346
1209
 
1347
1210
  @typing.overload
1348
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1211
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1349
1212
  ...
1350
1213
 
1351
1214
  @typing.overload
1352
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1353
- ...
1354
-
1355
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1356
- """
1357
- Specifies the number of times the task corresponding
1358
- to a step needs to be retried.
1359
-
1360
- This decorator is useful for handling transient errors, such as networking issues.
1361
- If your task contains operations that can't be retried safely, e.g. database updates,
1362
- it is advisable to annotate it with `@retry(times=0)`.
1363
-
1364
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1365
- decorator will execute a no-op task after all retries have been exhausted,
1366
- ensuring that the flow execution can continue.
1367
-
1368
- Parameters
1369
- ----------
1370
- times : int, default 3
1371
- Number of times to retry this task.
1372
- minutes_between_retries : int, default 2
1373
- Number of minutes between retries.
1374
- """
1375
- ...
1376
-
1377
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1378
- """
1379
- Specifies what flows belong to the same project.
1380
-
1381
- A project-specific namespace is created for all flows that
1382
- use the same `@project(name)`.
1383
-
1384
- Parameters
1385
- ----------
1386
- name : str
1387
- Project name. Make sure that the name is unique amongst all
1388
- projects that use the same production scheduler. The name may
1389
- contain only lowercase alphanumeric characters and underscores.
1390
-
1391
-
1392
- """
1215
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1393
1216
  ...
1394
1217
 
1395
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1218
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1396
1219
  """
1397
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1398
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1220
+ Specifies secrets to be retrieved and injected as environment variables prior to
1221
+ the execution of a step.
1399
1222
 
1400
1223
  Parameters
1401
1224
  ----------
1402
- timeout : int
1403
- Time, in seconds before the task times out and fails. (Default: 3600)
1404
- poke_interval : int
1405
- Time in seconds that the job should wait in between each try. (Default: 60)
1406
- mode : str
1407
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1408
- exponential_backoff : bool
1409
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1410
- pool : str
1411
- the slot pool this task should run in,
1412
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1413
- soft_fail : bool
1414
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1415
- name : str
1416
- Name of the sensor on Airflow
1417
- description : str
1418
- Description of sensor in the Airflow UI
1419
- external_dag_id : str
1420
- The dag_id that contains the task you want to wait for.
1421
- external_task_ids : List[str]
1422
- The list of task_ids that you want to wait for.
1423
- If None (default value) the sensor waits for the DAG. (Default: None)
1424
- allowed_states : List[str]
1425
- Iterable of allowed states, (Default: ['success'])
1426
- failed_states : List[str]
1427
- Iterable of failed or dis-allowed states. (Default: None)
1428
- execution_delta : datetime.timedelta
1429
- time difference with the previous execution to look at,
1430
- the default is the same logical date as the current task or DAG. (Default: None)
1431
- check_existence: bool
1432
- Set to True to check if the external task exists or check if
1433
- the DAG to wait for exists. (Default: True)
1225
+ sources : List[Union[str, Dict[str, Any]]], default: []
1226
+ List of secret specs, defining how the secrets are to be retrieved
1434
1227
  """
1435
1228
  ...
1436
1229
 
1437
1230
  @typing.overload
1438
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1231
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1439
1232
  """
1440
- Specifies the Conda environment for all steps of the flow.
1233
+ Specifies the PyPI packages for the step.
1441
1234
 
1442
- Use `@conda_base` to set common libraries required by all
1443
- steps and use `@conda` to specify step-specific additions.
1235
+ Information in this decorator will augment any
1236
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1237
+ you can use `@pypi_base` to set packages required by all
1238
+ steps and use `@pypi` to specify step-specific overrides.
1444
1239
 
1445
1240
  Parameters
1446
1241
  ----------
1447
- packages : Dict[str, str], default {}
1448
- Packages to use for this flow. The key is the name of the package
1242
+ packages : Dict[str, str], default: {}
1243
+ Packages to use for this step. The key is the name of the package
1449
1244
  and the value is the version to use.
1450
- libraries : Dict[str, str], default {}
1451
- Supported for backward compatibility. When used with packages, packages will take precedence.
1452
- python : str, optional, default None
1245
+ python : str, optional, default: None
1453
1246
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1454
1247
  that the version used will correspond to the version of the Python interpreter used to start the run.
1455
- disabled : bool, default False
1456
- If set to True, disables Conda.
1457
1248
  """
1458
1249
  ...
1459
1250
 
1460
1251
  @typing.overload
1461
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1252
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1462
1253
  ...
1463
1254
 
1464
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1255
+ @typing.overload
1256
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1257
+ ...
1258
+
1259
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1465
1260
  """
1466
- Specifies the Conda environment for all steps of the flow.
1261
+ Specifies the PyPI packages for the step.
1467
1262
 
1468
- Use `@conda_base` to set common libraries required by all
1469
- steps and use `@conda` to specify step-specific additions.
1263
+ Information in this decorator will augment any
1264
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1265
+ you can use `@pypi_base` to set packages required by all
1266
+ steps and use `@pypi` to specify step-specific overrides.
1470
1267
 
1471
1268
  Parameters
1472
1269
  ----------
1473
- packages : Dict[str, str], default {}
1474
- Packages to use for this flow. The key is the name of the package
1270
+ packages : Dict[str, str], default: {}
1271
+ Packages to use for this step. The key is the name of the package
1475
1272
  and the value is the version to use.
1476
- libraries : Dict[str, str], default {}
1477
- Supported for backward compatibility. When used with packages, packages will take precedence.
1478
- python : str, optional, default None
1273
+ python : str, optional, default: None
1479
1274
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1480
1275
  that the version used will correspond to the version of the Python interpreter used to start the run.
1481
- disabled : bool, default False
1482
- If set to True, disables Conda.
1483
1276
  """
1484
1277
  ...
1485
1278
 
1486
1279
  @typing.overload
1487
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1280
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1488
1281
  """
1489
- Specifies the times when the flow should be run when running on a
1490
- production scheduler.
1282
+ Specifies the number of times the task corresponding
1283
+ to a step needs to be retried.
1284
+
1285
+ This decorator is useful for handling transient errors, such as networking issues.
1286
+ If your task contains operations that can't be retried safely, e.g. database updates,
1287
+ it is advisable to annotate it with `@retry(times=0)`.
1288
+
1289
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1290
+ decorator will execute a no-op task after all retries have been exhausted,
1291
+ ensuring that the flow execution can continue.
1491
1292
 
1492
1293
  Parameters
1493
1294
  ----------
1494
- hourly : bool, default False
1495
- Run the workflow hourly.
1496
- daily : bool, default True
1497
- Run the workflow daily.
1498
- weekly : bool, default False
1499
- Run the workflow weekly.
1500
- cron : str, optional, default None
1501
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1502
- specified by this expression.
1503
- timezone : str, optional, default None
1504
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1505
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1295
+ times : int, default 3
1296
+ Number of times to retry this task.
1297
+ minutes_between_retries : int, default 2
1298
+ Number of minutes between retries.
1506
1299
  """
1507
1300
  ...
1508
1301
 
1509
1302
  @typing.overload
1510
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1303
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1511
1304
  ...
1512
1305
 
1513
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1306
+ @typing.overload
1307
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1308
+ ...
1309
+
1310
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1514
1311
  """
1515
- Specifies the times when the flow should be run when running on a
1516
- production scheduler.
1312
+ Specifies the number of times the task corresponding
1313
+ to a step needs to be retried.
1314
+
1315
+ This decorator is useful for handling transient errors, such as networking issues.
1316
+ If your task contains operations that can't be retried safely, e.g. database updates,
1317
+ it is advisable to annotate it with `@retry(times=0)`.
1318
+
1319
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1320
+ decorator will execute a no-op task after all retries have been exhausted,
1321
+ ensuring that the flow execution can continue.
1517
1322
 
1518
1323
  Parameters
1519
1324
  ----------
1520
- hourly : bool, default False
1521
- Run the workflow hourly.
1522
- daily : bool, default True
1523
- Run the workflow daily.
1524
- weekly : bool, default False
1525
- Run the workflow weekly.
1526
- cron : str, optional, default None
1527
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1528
- specified by this expression.
1529
- timezone : str, optional, default None
1530
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1531
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1325
+ times : int, default 3
1326
+ Number of times to retry this task.
1327
+ minutes_between_retries : int, default 2
1328
+ Number of minutes between retries.
1532
1329
  """
1533
1330
  ...
1534
1331
 
1535
1332
  @typing.overload
1536
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1333
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
      """
-     Specifies the event(s) that this flow depends on.
+     Specifies a timeout for your step.

-     ```
-     @trigger(event='foo')
-     ```
-     or
-     ```
-     @trigger(events=['foo', 'bar'])
-     ```
+     This decorator is useful if this step may hang indefinitely.

-     Additionally, you can specify the parameter mappings
-     to map event payload to Metaflow parameters for the flow.
-     ```
-     @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-     ```
-     or
-     ```
-     @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                       {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-     ```
+     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+     A timeout is considered to be an exception thrown by the step. It will cause the step to be
+     retried if needed and the exception will be caught by the `@catch` decorator, if present.

-     'parameters' can also be a list of strings and tuples like so:
-     ```
-     @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-     ```
-     This is equivalent to:
-     ```
-     @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-     ```
+     Note that all the values specified in parameters are added together so if you specify
+     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

      Parameters
      ----------
-     event : Union[str, Dict[str, Any]], optional, default None
-         Event dependency for this flow.
-     events : List[Union[str, Dict[str, Any]]], default []
-         Events dependency for this flow.
-     options : Dict[str, Any], default {}
-         Backend-specific configuration for tuning eventing behavior.
-
-
+     seconds : int, default 0
+         Number of seconds to wait prior to timing out.
+     minutes : int, default 0
+         Number of minutes to wait prior to timing out.
+     hours : int, default 0
+         Number of hours to wait prior to timing out.
      """
      ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
      ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
      """
-     Specifies the event(s) that this flow depends on.
+     Specifies a timeout for your step.

-     ```
-     @trigger(event='foo')
-     ```
-     or
-     ```
-     @trigger(events=['foo', 'bar'])
-     ```
+     This decorator is useful if this step may hang indefinitely.

-     Additionally, you can specify the parameter mappings
-     to map event payload to Metaflow parameters for the flow.
-     ```
-     @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-     ```
-     or
-     ```
-     @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                       {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-     ```
+     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+     A timeout is considered to be an exception thrown by the step. It will cause the step to be
+     retried if needed and the exception will be caught by the `@catch` decorator, if present.

-     'parameters' can also be a list of strings and tuples like so:
-     ```
-     @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-     ```
-     This is equivalent to:
-     ```
-     @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-     ```
+     Note that all the values specified in parameters are added together so if you specify
+     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+     Parameters
+     ----------
+     seconds : int, default 0
+         Number of seconds to wait prior to timing out.
+     minutes : int, default 0
+         Number of minutes to wait prior to timing out.
+     hours : int, default 0
+         Number of hours to wait prior to timing out.
+     """
+     ...
+
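For orientation, here is a minimal sketch (not part of the diff) of how the `@timeout` decorator documented in the added stub above might be combined with `@retry` and `@catch`; the flow and step names, and the attribute `slow_error`, are hypothetical:

```python
from metaflow import FlowSpec, step, timeout, retry, catch


class TimeoutDemoFlow(FlowSpec):

    # The step is killed after 5 minutes, retried once, and any final
    # failure is recorded in self.slow_error instead of crashing the run.
    @catch(var="slow_error")
    @retry(times=1)
    @timeout(minutes=5)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```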
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

      Parameters
      ----------
-     event : Union[str, Dict[str, Any]], optional, default None
-         Event dependency for this flow.
-     events : List[Union[str, Dict[str, Any]]], default []
-         Events dependency for this flow.
-     options : Dict[str, Any], default {}
-         Backend-specific configuration for tuning eventing behavior.
-
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+     """
+     ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+     """
+     Specifies the times when the flow should be run when running on a
+     production scheduler.

+     Parameters
+     ----------
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...
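As an illustrative sketch (not part of the diff) of the `@schedule` decorator added above, assuming a flow deployed to a production scheduler; the cron expression and timezone are placeholders and the expression format follows the target scheduler's conventions:

```python
from metaflow import FlowSpec, schedule, step


# Deploy-time schedule: run every morning at 06:00 in the given IANA timezone
# (timezone support applies to Argo Workflows, per the docstring above).
@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyReportFlow()
```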

@@ -1666,6 +1474,24 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
      """
      ...

+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies what flows belong to the same project.
+
+     A project-specific namespace is created for all flows that
+     use the same `@project(name)`.
+
+     Parameters
+     ----------
+     name : str
+         Project name. Make sure that the name is unique amongst all
+         projects that use the same production scheduler. The name may
+         contain only lowercase alphanumeric characters and underscores.
+
+
+     """
+     ...
+
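A minimal sketch (not part of the diff) of the `@project` decorator added above; the project and flow names are hypothetical:

```python
from metaflow import FlowSpec, project, step


# All flows deployed with the same @project name share one namespace,
# so their production deployments can be branched and versioned together.
@project(name="demo_churn_model")
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```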
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
      The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1708,6 +1534,97 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
      """
      ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in;
+         slot pools are a way to limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to true to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (default value) the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states. (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or dis-allowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         Time difference with the previous execution to look at;
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence : bool
+         Set to True to check if the external task exists or check if
+         the DAG to wait for exists. (Default: True)
+     """
+     ...
+
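The sketch below (not part of the diff) shows one way the `@airflow_external_task_sensor` decorator added above might be used. The DAG id, sensor name, and flow name are hypothetical, and it is assumed that parameters omitted here fall back to the defaults listed in the docstring:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Block this flow's start step until the (hypothetical) upstream Airflow DAG
# 'daily_ingest' has succeeded; only works when deployed via `airflow create`.
@airflow_external_task_sensor(
    name="wait_for_daily_ingest",
    description="Wait for the upstream ingest DAG",
    external_dag_id="daily_ingest",
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```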
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the Conda environment for all steps of the flow.
+
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
+     """
+     ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+     """
+     Specifies the Conda environment for all steps of the flow.
+
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
+     """
+     ...
+
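A short sketch (not part of the diff) of `@conda_base` combined with a step-level `@conda` addition, as the docstring above suggests; the flow name and package versions are only illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step


# Flow-level Conda environment shared by every step.
@conda_base(python="3.10.4", packages={"pandas": "2.1.4"})
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level environment
        self.frame = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.train)

    # Step-specific addition layered on top of the @conda_base packages.
    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def train(self):
        import sklearn  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```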
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
@@ -1811,6 +1728,101 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
      """
      ...

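For context, a sketch (not part of the diff) of the `@trigger_on_finish` decorator whose signature appears above; the upstream and downstream flow names are hypothetical and the wiring only applies to flows deployed on a production orchestrator:

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Run this flow whenever a deployment of the (hypothetical) upstream
# 'TrainingFlow' finishes successfully.
@trigger_on_finish(flow="TrainingFlow")
class PublishFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PublishFlow()
```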
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the event(s) that this flow depends on.
+
+     ```
+     @trigger(event='foo')
+     ```
+     or
+     ```
+     @trigger(events=['foo', 'bar'])
+     ```
+
+     Additionally, you can specify the parameter mappings
+     to map event payload to Metaflow parameters for the flow.
+     ```
+     @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+     ```
+     or
+     ```
+     @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+     ```
+
+     'parameters' can also be a list of strings and tuples like so:
+     ```
+     @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+     ```
+     This is equivalent to:
+     ```
+     @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+     ```
+
+     Parameters
+     ----------
+     event : Union[str, Dict[str, Any]], optional, default None
+         Event dependency for this flow.
+     events : List[Union[str, Dict[str, Any]]], default []
+         Events dependency for this flow.
+     options : Dict[str, Any], default {}
+         Backend-specific configuration for tuning eventing behavior.
+
+
+     """
+     ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+     """
+     Specifies the event(s) that this flow depends on.
+
+     ```
+     @trigger(event='foo')
+     ```
+     or
+     ```
+     @trigger(events=['foo', 'bar'])
+     ```
+
+     Additionally, you can specify the parameter mappings
+     to map event payload to Metaflow parameters for the flow.
+     ```
+     @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+     ```
+     or
+     ```
+     @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+     ```
+
+     'parameters' can also be a list of strings and tuples like so:
+     ```
+     @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+     ```
+     This is equivalent to:
+     ```
+     @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+     ```
+
+     Parameters
+     ----------
+     event : Union[str, Dict[str, Any]], optional, default None
+         Event dependency for this flow.
+     events : List[Union[str, Dict[str, Any]]], default []
+         Events dependency for this flow.
+     options : Dict[str, Any], default {}
+         Backend-specific configuration for tuning eventing behavior.
+
+
+     """
+     ...
+
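To round out the `@trigger` docstring above, a minimal sketch (not part of the diff) of an event-triggered flow; the event name, payload field, and parameter name are hypothetical:

```python
from metaflow import FlowSpec, Parameter, step, trigger


# When the (hypothetical) 'data_ready' event fires, start this flow and map
# the event's 'table_name' payload field onto the 'table' parameter.
@trigger(event={"name": "data_ready", "parameters": {"table": "table_name"}})
class EventDrivenFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("processing", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```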
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
      """
      Switch namespace to the one provided.