ob-metaflow-stubs 3.9__py2.py3-none-any.whl → 4.0__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
Files changed (142)
  1. metaflow-stubs/__init__.pyi +498 -498
  2. metaflow-stubs/cards.pyi +10 -4
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +9 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +9 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  109. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  110. metaflow-stubs/plugins/package_cli.pyi +2 -2
  111. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/perimeters.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  128. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/profilers/__init__.pyi +2 -2
  131. metaflow-stubs/pylint_wrapper.pyi +2 -2
  132. metaflow-stubs/runner/__init__.pyi +2 -2
  133. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  134. metaflow-stubs/runner/nbrun.pyi +2 -2
  135. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  136. metaflow-stubs/tagging_util.pyi +2 -2
  137. metaflow-stubs/tuple_util.pyi +2 -2
  138. {ob_metaflow_stubs-3.9.dist-info → ob_metaflow_stubs-4.0.dist-info}/METADATA +1 -1
  139. ob_metaflow_stubs-4.0.dist-info/RECORD +142 -0
  140. ob_metaflow_stubs-3.9.dist-info/RECORD +0 -142
  141. {ob_metaflow_stubs-3.9.dist-info → ob_metaflow_stubs-4.0.dist-info}/WHEEL +0 -0
  142. {ob_metaflow_stubs-3.9.dist-info → ob_metaflow_stubs-4.0.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.4.2+ob(v1) #
- # Generated on 2024-06-19T18:39:56.270719 #
+ # MF version: 2.12.5.2+ob(v1) #
+ # Generated on 2024-06-24T23:02:43.033000 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.plugins.datatools.s3.s3
- import metaflow._vendor.click.types
- import typing
- import metaflow.events
- import metaflow.metaflow_current
- import metaflow.datastore.inputs
+ import io
  import metaflow.client.core
- import metaflow.runner.metaflow_runner
  import metaflow.parameters
+ import metaflow.metaflow_current
  import metaflow.flowspec
+ import metaflow.runner.metaflow_runner
+ import typing
  import datetime
- import io
+ import metaflow.plugins.datatools.s3.s3
+ import metaflow.datastore.inputs
+ import metaflow.events
+ import metaflow._vendor.click.types
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

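The imports above sit inside an `if typing.TYPE_CHECKING:` block, so they are only seen by type checkers and never executed at runtime; the reshuffling here is cosmetic output of the stub generator, alongside the version and timestamp bump in the header. For orientation only (not taken from the stub itself), a generic sketch of that pattern with a hypothetical helper name:

from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # Only evaluated by type checkers; never imported at runtime.
    import datetime

def started_at(run: typing.Any) -> datetime.datetime | None:
    # Hypothetical helper. With 'from __future__ import annotations' the
    # annotation above stays a string at runtime, so the datetime import
    # is not needed to execute this function.
    return getattr(run, "created_at", None)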
@@ -727,112 +727,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -885,194 +779,388 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the PyPI packages for the step.
+ Specifies environment variables to be set prior to the execution of a step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

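The `@pypi`, `@timeout`, `@catch`, `@batch`, `@environment` and `@retry` stubs added in this hunk mirror definitions removed elsewhere in the file; version 4.0 emits the generated overloads in a different order, while the signatures and docstrings themselves are unchanged. For orientation only (this is not part of the diff), a sketch of how these step decorators are typically combined in a flow; the package versions, resource sizes and variable names are illustrative assumptions:

from metaflow import FlowSpec, step, batch, catch, environment, pypi, pypi_base, retry, timeout

@pypi_base(python="3.10.11")
class ExampleFlow(FlowSpec):

    # A timeout surfaces as an exception, @retry reruns the task, and @catch
    # stores whatever exception finally escapes in self.train_error.
    @catch(var="train_error", print_exception=True)
    @retry(times=2, minutes_between_retries=1)
    @timeout(hours=1)
    @environment(vars={"OMP_NUM_THREADS": "4"})
    @batch(cpu=4, memory=16000)
    @pypi(packages={"scikit-learn": "1.4.2"})
    @step
    def start(self):
        import sklearn  # resolved from the @pypi-managed environment
        self.sklearn_version = sklearn.__version__
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExampleFlow()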
@@ -1136,33 +1224,92 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

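The `@conda` and `@secrets` stubs added here likewise correspond to step-level definitions removed earlier in the file; the surrounding change is again a reordering of the generated overloads. A short illustrative sketch of the two decorators used together; the secret source name and the environment variable it injects are hypothetical:

from metaflow import FlowSpec, step, conda, secrets

class SecretsFlow(FlowSpec):

    # @secrets injects the named secret as environment variables before the
    # step runs; @conda pins the step's package environment.
    @secrets(sources=["my-db-credentials"])
    @conda(packages={"pandas": "2.2.2"}, python="3.10.11")
    @step
    def start(self):
        import os
        import pandas as pd
        self.db_user = os.environ.get("DB_USER")  # hypothetical variable name
        self.shape = pd.DataFrame({"a": [1, 2, 3]}).shape
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()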
@@ -1244,149 +1391,90 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1244
1391
  ...
1245
1392
 
1246
1393
  @typing.overload
1247
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
  ...
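Editor's note: a minimal, illustrative sketch of how the newly added `@pypi_base` flow decorator is applied. The flow name and package pins below are placeholders, not taken from the stub; per the docstring, step-level `@pypi` can override individual packages.

from metaflow import FlowSpec, pypi_base, step

# Flow-wide PyPI environment; versions here are illustrative only.
@pypi_base(packages={"pandas": "2.1.4"}, python="3.10.11")
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    PandasFlow()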

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
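Editor's note: a comparable minimal sketch for the `@conda_base` flow decorator added here. The flow name and version pins are illustrative; per the docstring, `libraries` is kept only for backward compatibility and `packages` takes precedence.

from metaflow import FlowSpec, conda_base, step

# Flow-wide Conda environment; versions here are illustrative only.
@conda_base(packages={"numpy": "1.26.4"}, python="3.10.14")
class NumpyFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the Conda environment
        self.mean = float(np.mean([1.0, 2.0, 3.0]))
        self.next(self.end)

    @step
    def end(self):
        print(self.mean)


if __name__ == "__main__":
    NumpyFlow()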

@@ -1432,24 +1520,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1492,94 +1562,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1675,55 +1657,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1827,6 +1760,73 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
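Editor's note: a minimal sketch of the `@schedule` flow decorator documented above, using the boolean form from the docstring. The flow name is illustrative, and the schedule only takes effect once the flow is deployed to a production scheduler (Argo Workflows, AWS Step Functions, or Airflow).

from metaflow import FlowSpec, schedule, step

# Daily schedule per the docstring; has no effect on local `run`.
@schedule(daily=True)
class DailyReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DailyReportFlow()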
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
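Editor's note: a minimal sketch of the `@project` flow decorator documented above. The project name is a placeholder that follows the lowercase-alphanumerics-and-underscores rule from the docstring; the flow itself is illustrative.

from metaflow import FlowSpec, project, step

# All flows sharing this project name get a project-specific namespace.
@project(name="demo_project")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()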
+
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.