metaflow-stubs 2.12.17__py2.py3-none-any.whl → 2.12.18__py2.py3-none-any.whl

This diff covers the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +575 -575
  2. metaflow-stubs/cards.pyi +3 -3
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +4 -4
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +5 -5
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  87. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  112. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +3 -3
  135. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  136. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  137. metaflow-stubs/runner/nbrun.pyi +2 -2
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +3 -3
  141. metaflow-stubs/system/system_logger.pyi +3 -3
  142. metaflow-stubs/system/system_monitor.pyi +2 -2
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.17.dist-info → metaflow_stubs-2.12.18.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.18.dist-info/RECORD +150 -0
  148. {metaflow_stubs-2.12.17.dist-info → metaflow_stubs-2.12.18.dist-info}/WHEEL +1 -1
  149. metaflow_stubs-2.12.17.dist-info/RECORD +0 -150
  150. {metaflow_stubs-2.12.17.dist-info → metaflow_stubs-2.12.18.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.17 #
- # Generated on 2024-08-27T00:52:58.058650 #
+ # MF version: 2.12.18 #
+ # Generated on 2024-08-28T16:18:32.928126 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.datastore.inputs
+ import metaflow.metaflow_current
  import metaflow._vendor.click.types
- import metaflow.plugins.datatools.s3.s3
+ import io
+ import metaflow.client.core
  import metaflow.runner.metaflow_runner
  import typing
+ import metaflow.flowspec
+ import metaflow.plugins.datatools.s3.s3
  import metaflow.parameters
- import metaflow.metaflow_current
  import metaflow.events
- import io
- import metaflow.flowspec
  import datetime
- import metaflow.client.core
+ import metaflow.datastore.inputs
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

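The `FlowSpecDerived` type variable and `StepFlag` flag type declared above are the scaffolding for every decorator stub in this file: each decorator is overloaded to accept either a plain step function or a join step that also receives inputs, and to return it with its signature intact. A minimal sketch of the pattern, independent of Metaflow (the `my_decorator` name is hypothetical):

```
import typing

FlowSpecDerived = typing.TypeVar("FlowSpecDerived")
StepFlag = typing.NewType("StepFlag", bool)

# Typed this way, the decorator returns the step unchanged, so a type
# checker preserves the original step signature for downstream checks.
def my_decorator(
    f: typing.Callable[[FlowSpecDerived, StepFlag], None]
) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
    return f
```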
@@ -785,59 +785,185 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the Conda environment for the step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

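This hunk reorders the generated stubs: `@card`, `@catch`, `@environment`, `@parallel`, and `@secrets` now appear at the position where 2.12.17 had `@conda`; the signatures themselves are unchanged. A minimal sketch of how the step decorators documented above compose in user code, assuming a standard Metaflow installation (the flow name and artifact names are illustrative, not from this diff):

```
from metaflow import FlowSpec, card, catch, environment, step


class DemoFlow(FlowSpec):

    @card(type="default", timeout=45)            # report rendered after the step
    @catch(var="failure", print_exception=True)  # exception stored as self.failure
    @environment(vars={"DEMO_MODE": "1"})        # env var set before the step runs
    @step
    def start(self):
        self.failure = None  # overwritten by @catch if this step raises
        self.next(self.end)

    @step
    def end(self):
        if self.failure is not None:
            print("start step failed:", self.failure)


if __name__ == "__main__":
    DemoFlow()
```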
@@ -895,149 +1021,79 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
  gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
  memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ Memory size (in MB) required for this step.
  shared_memory : int, optional, default None
  The value for the size (in MiB) of the /dev/shm volume for this step.
  This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
  gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
  memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ Memory size (in MB) required for this step.
  shared_memory : int, optional, default None
  The value for the size (in MiB) of the /dev/shm volume for this step.
  This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
  """
  ...

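Here `@resources` takes the position that `@batch` held in 2.12.17 (the `@batch` stub reappears in a later hunk). A minimal sketch of the usage its docstring describes, with illustrative numbers and flow name:

```
from metaflow import FlowSpec, resources, step


class ResourceDemoFlow(FlowSpec):

    # Declare requirements once, independent of the compute layer.
    @resources(cpu=2, memory=8192, disk=20000)  # disk only applies on Kubernetes
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourceDemoFlow()
```

Running `python resource_demo.py run --with batch` or `--with kubernetes` then applies these requirements on the chosen compute layer, as the docstring above notes.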
@@ -1104,212 +1160,206 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1104
1160
  ...
1105
1161
 
1106
1162
  @typing.overload
1107
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1163
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1108
1164
  """
1109
- Specifies that the step will success under all circumstances.
1165
+ Specifies the Conda environment for the step.
1110
1166
 
1111
- The decorator will create an optional artifact, specified by `var`, which
1112
- contains the exception raised. You can use it to detect the presence
1113
- of errors, indicating that all happy-path artifacts produced by the step
1114
- are missing.
1167
+ Information in this decorator will augment any
1168
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1169
+ you can use `@conda_base` to set packages required by all
1170
+ steps and use `@conda` to specify step-specific overrides.
1115
1171
 
1116
1172
  Parameters
1117
1173
  ----------
1118
- var : str, optional, default None
1119
- Name of the artifact in which to store the caught exception.
1120
- If not specified, the exception is not stored.
1121
- print_exception : bool, default True
1122
- Determines whether or not the exception is printed to
1123
- stdout when caught.
1174
+ packages : Dict[str, str], default {}
1175
+ Packages to use for this step. The key is the name of the package
1176
+ and the value is the version to use.
1177
+ libraries : Dict[str, str], default {}
1178
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1179
+ python : str, optional, default None
1180
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1181
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1182
+ disabled : bool, default False
1183
+ If set to True, disables @conda.
1124
1184
  """
1125
1185
  ...
1126
1186
 
1127
1187
  @typing.overload
1128
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1188
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1129
1189
  ...
1130
1190
 
1131
1191
  @typing.overload
1132
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1192
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1133
1193
  ...
1134
1194
 
1135
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1195
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1136
1196
  """
1137
- Specifies that the step will success under all circumstances.
1197
+ Specifies the Conda environment for the step.
1138
1198
 
1139
- The decorator will create an optional artifact, specified by `var`, which
1140
- contains the exception raised. You can use it to detect the presence
1141
- of errors, indicating that all happy-path artifacts produced by the step
1142
- are missing.
1199
+ Information in this decorator will augment any
1200
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1201
+ you can use `@conda_base` to set packages required by all
1202
+ steps and use `@conda` to specify step-specific overrides.
1143
1203
 
1144
1204
  Parameters
1145
1205
  ----------
1146
- var : str, optional, default None
1147
- Name of the artifact in which to store the caught exception.
1148
- If not specified, the exception is not stored.
1149
- print_exception : bool, default True
1150
- Determines whether or not the exception is printed to
1151
- stdout when caught.
1206
+ packages : Dict[str, str], default {}
1207
+ Packages to use for this step. The key is the name of the package
1208
+ and the value is the version to use.
1209
+ libraries : Dict[str, str], default {}
1210
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1211
+ python : str, optional, default None
1212
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1213
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1214
+ disabled : bool, default False
1215
+ If set to True, disables @conda.
1152
1216
  """
1153
1217
  ...
1154
1218
 
1155
1219
  @typing.overload
1156
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1220
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1157
1221
  """
1158
- Specifies the resources needed when executing this step.
1159
-
1160
- Use `@resources` to specify the resource requirements
1161
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1162
-
1163
- You can choose the compute layer on the command line by executing e.g.
1164
- ```
1165
- python myflow.py run --with batch
1166
- ```
1167
- or
1168
- ```
1169
- python myflow.py run --with kubernetes
1170
- ```
1171
- which executes the flow on the desired system using the
1172
- requirements specified in `@resources`.
1222
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1173
1223
 
1174
1224
  Parameters
1175
1225
  ----------
1176
1226
  cpu : int, default 1
1177
- Number of CPUs required for this step.
1227
+ Number of CPUs required for this step. If `@resources` is
1228
+ also present, the maximum value from all decorators is used.
1178
1229
  gpu : int, default 0
1179
- Number of GPUs required for this step.
1180
- disk : int, optional, default None
1181
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1230
+ Number of GPUs required for this step. If `@resources` is
1231
+ also present, the maximum value from all decorators is used.
1182
1232
  memory : int, default 4096
1183
- Memory size (in MB) required for this step.
1233
+ Memory size (in MB) required for this step. If
1234
+ `@resources` is also present, the maximum value from all decorators is
1235
+ used.
1236
+ image : str, optional, default None
1237
+ Docker image to use when launching on AWS Batch. If not specified, and
1238
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1239
+ not, a default Docker image mapping to the current version of Python is used.
1240
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1241
+ AWS Batch Job Queue to submit the job to.
1242
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1243
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1244
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1245
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1246
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1184
1247
  shared_memory : int, optional, default None
1185
1248
  The value for the size (in MiB) of the /dev/shm volume for this step.
1186
1249
  This parameter maps to the `--shm-size` option in Docker.
1250
+ max_swap : int, optional, default None
1251
+ The total amount of swap memory (in MiB) a container can use for this
1252
+ step. This parameter is translated to the `--memory-swap` option in
1253
+ Docker where the value is the sum of the container memory plus the
1254
+ `max_swap` value.
1255
+ swappiness : int, optional, default None
1256
+ This allows you to tune memory swappiness behavior for this step.
1257
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1258
+ necessary. A swappiness value of 100 causes pages to be swapped very
1259
+ aggressively. Accepted values are whole numbers between 0 and 100.
1260
+ use_tmpfs : bool, default False
1261
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
1262
+ not available on Fargate compute environments
1263
+ tmpfs_tempdir : bool, default True
1264
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1265
+ tmpfs_size : int, optional, default None
1266
+ The value for the size (in MiB) of the tmpfs mount for this step.
1267
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1268
+ memory allocated for this step.
1269
+ tmpfs_path : str, optional, default None
1270
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1271
+ inferentia : int, default 0
1272
+ Number of Inferentia chips required for this step.
1273
+ trainium : int, default None
1274
+ Alias for inferentia. Use only one of the two.
1275
+ efa : int, default 0
1276
+ Number of elastic fabric adapter network devices to attach to container
1277
+ ephemeral_storage : int, default None
1278
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1279
+ This is only relevant for Fargate compute environments
1280
+ log_driver: str, optional, default None
1281
+ The log driver to use for the Amazon ECS container.
1282
+ log_options: List[str], optional, default None
1283
+ List of strings containing options for the chosen log driver. The configurable values
1284
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
1285
+ Example: [`awslogs-group:aws/batch/job`]
1187
1286
  """
1188
1287
  ...
1189
1288
 
1190
1289
  @typing.overload
1191
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1290
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1192
1291
  ...
1193
1292
 
1194
1293
  @typing.overload
1195
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1294
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1196
1295
  ...
1197
1296
 
1198
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1297
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1199
1298
  """
1200
- Specifies the resources needed when executing this step.
1201
-
1202
- Use `@resources` to specify the resource requirements
1203
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1204
-
1205
- You can choose the compute layer on the command line by executing e.g.
1206
- ```
1207
- python myflow.py run --with batch
1208
- ```
1209
- or
1210
- ```
1211
- python myflow.py run --with kubernetes
1212
- ```
1213
- which executes the flow on the desired system using the
1214
- requirements specified in `@resources`.
1299
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1215
1300
 
1216
1301
  Parameters
1217
1302
  ----------
1218
1303
  cpu : int, default 1
1219
- Number of CPUs required for this step.
1304
+ Number of CPUs required for this step. If `@resources` is
1305
+ also present, the maximum value from all decorators is used.
1220
1306
  gpu : int, default 0
1221
- Number of GPUs required for this step.
1222
- disk : int, optional, default None
1223
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1307
+ Number of GPUs required for this step. If `@resources` is
1308
+ also present, the maximum value from all decorators is used.
1224
1309
  memory : int, default 4096
1225
- Memory size (in MB) required for this step.
1310
+ Memory size (in MB) required for this step. If
1311
+ `@resources` is also present, the maximum value from all decorators is
1312
+ used.
1313
+ image : str, optional, default None
1314
+ Docker image to use when launching on AWS Batch. If not specified, and
1315
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1316
+ not, a default Docker image mapping to the current version of Python is used.
1317
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1318
+ AWS Batch Job Queue to submit the job to.
1319
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1320
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1321
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1322
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1323
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1226
1324
  shared_memory : int, optional, default None
1227
1325
  The value for the size (in MiB) of the /dev/shm volume for this step.
1228
1326
  This parameter maps to the `--shm-size` option in Docker.
1229
- """
1230
- ...
1231
-
1232
- @typing.overload
1233
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1234
- """
1235
- Specifies secrets to be retrieved and injected as environment variables prior to
1236
- the execution of a step.
1237
-
1238
- Parameters
1239
- ----------
1240
- sources : List[Union[str, Dict[str, Any]]], default: []
1241
- List of secret specs, defining how the secrets are to be retrieved
1242
- """
1243
- ...
1244
-
1245
- @typing.overload
1246
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1247
- ...
1248
-
1249
- @typing.overload
1250
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1251
- ...
1252
-
1253
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1254
- """
1255
- Specifies secrets to be retrieved and injected as environment variables prior to
1256
- the execution of a step.
1257
-
1258
- Parameters
1259
- ----------
1260
- sources : List[Union[str, Dict[str, Any]]], default: []
1261
- List of secret specs, defining how the secrets are to be retrieved
1262
- """
1263
- ...
1264
-
1265
- @typing.overload
1266
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1267
- """
1268
- Creates a human-readable report, a Metaflow Card, after this step completes.
1269
-
1270
- Note that you may add multiple `@card` decorators in a step with different parameters.
1271
-
1272
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker, where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path, if set, for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to the tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to the container.
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200 GiB).
+ This is only relevant for Fargate compute environments.
+ log_driver : str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options : List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...
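
To make the new AWS Batch options above concrete, here is a minimal sketch; the flow, step names, and resource values are hypothetical and assume a working Metaflow deployment with AWS Batch configured:

```python
from metaflow import FlowSpec, batch, step


class TmpfsExampleFlow(FlowSpec):
    # Hypothetical sizing: 4 GiB of memory, up to 1 GiB of extra swap
    # (translated to Docker's --memory-swap), and a 2 GiB tmpfs mount;
    # tmpfs_tempdir=True (the default) points METAFLOW_TEMPDIR at it.
    @batch(memory=4096, max_swap=1024, use_tmpfs=True, tmpfs_size=2048)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TmpfsExampleFlow()
```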

@@ -1363,148 +1413,60 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorator types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the flow(s) that this flow depends on.
 
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
 
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
  Specifies the flow(s) that this flow depends on.
 
@@ -1553,74 +1515,126 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  """
  ...
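
A usage sketch for the `trigger_on_finish` overloads above; the flow names are hypothetical, and the trigger only takes effect once the flow is deployed to a production orchestrator:

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Hypothetical downstream flow: per the semantics documented above, it is
# triggered when an upstream run of FooFlow in the same namespace succeeds.
@trigger_on_finish(flow='FooFlow')
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```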

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
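
A sketch of this sensor in use; the DAG and task ids are hypothetical, the omitted options fall back to the defaults documented above, and the decorator only takes effect when the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Hypothetical upstream DAG/task: the start step waits until the
# 'transform' task of 'upstream_etl' reaches an allowed state.
@airflow_external_task_sensor(external_dag_id='upstream_etl',
                              external_task_ids=['transform'])
class SensorGatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```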
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
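
Similarly, a minimal sketch for the S3 key sensor; the bucket and key are hypothetical, and the flow must again be compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Hypothetical marker object: the start step is held back until the
# key exists. With a full s3:// url, bucket_name stays unset per above.
@airflow_s3_key_sensor(bucket_key='s3://my-bucket/data/_SUCCESS')
class S3GatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```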
+
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
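
A usage sketch for `pypi_base`; the package pin and Python version are hypothetical:

```python
from metaflow import FlowSpec, pypi_base, step


# Flow-level PyPI environment shared by every step; a per-step @pypi
# decorator could still override or extend it, as noted above.
@pypi_base(packages={'pandas': '2.1.0'}, python='3.10.11')
class PypiBaseFlow(FlowSpec):
    @step
    def start(self):
        import pandas  # resolved from the flow-level environment
        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseFlow()
```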

@@ -1673,6 +1687,73 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
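
Only the tail of `@schedule` appears in this hunk, but its header shows the `hourly` flag; a sketch under that assumption:

```python
from metaflow import FlowSpec, schedule, step


# Assumed from the hunk header above: hourly=True runs the flow every
# hour once it is deployed to a production scheduler.
@schedule(hourly=True)
class HourlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HourlyFlow()
```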

+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
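
A matching sketch for `conda_base`; the pinned versions are hypothetical:

```python
from metaflow import FlowSpec, conda_base, step


# Flow-level Conda environment shared by every step; a per-step @conda
# decorator can add step-specific packages on top, as noted above.
@conda_base(packages={'numpy': '1.25.2'}, python='3.10.11')
class CondaBaseFlow(FlowSpec):
    @step
    def start(self):
        import numpy  # provided by the flow-level Conda environment
        print(numpy.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseFlow()
```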
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
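
A minimal sketch of `@project`; the project name is hypothetical:

```python
from metaflow import FlowSpec, project, step


# Every flow decorated with @project(name='demo_project') shares one
# project-specific namespace on the production scheduler.
@project(name='demo_project')
class ProjectedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectedFlow()
```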
+
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1768,87 +1849,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
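
Finally, a usage sketch for the event-based `@trigger` overload above; the event name is hypothetical, and triggering requires a deployed flow:

```python
from metaflow import FlowSpec, step, trigger


# Hypothetical event: a deployed run starts whenever 'data_updated'
# is published to the configured event backend.
@trigger(event='data_updated')
class EventDrivenFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```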

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.