ob-metaflow-stubs 4.9__py2.py3-none-any.whl → 5.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +408 -555
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +17 -17
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +7 -7
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  43. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  50. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  54. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  55. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  56. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  57. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  59. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  60. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  61. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  67. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  70. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  71. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  72. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  73. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  74. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  75. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  76. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  77. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  78. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  80. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  81. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  82. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  83. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  84. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  86. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  88. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  92. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  95. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  97. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  98. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  99. metaflow-stubs/plugins/package_cli.pyi +2 -2
  100. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/perimeters.pyi +2 -2
  102. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  105. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  106. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  108. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  109. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  115. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  116. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  118. metaflow-stubs/procpoll.pyi +2 -2
  119. metaflow-stubs/profilers/__init__.pyi +2 -2
  120. metaflow-stubs/pylint_wrapper.pyi +2 -2
  121. metaflow-stubs/runner/__init__.pyi +2 -2
  122. metaflow-stubs/runner/deployer.pyi +3 -3
  123. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  124. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  125. metaflow-stubs/runner/nbrun.pyi +2 -2
  126. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  127. metaflow-stubs/runner/utils.pyi +2 -2
  128. metaflow-stubs/system/__init__.pyi +3 -3
  129. metaflow-stubs/system/system_logger.pyi +2 -2
  130. metaflow-stubs/system/system_monitor.pyi +3 -3
  131. metaflow-stubs/tagging_util.pyi +2 -2
  132. metaflow-stubs/tuple_util.pyi +2 -2
  133. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.0.dist-info}/METADATA +1 -1
  134. ob_metaflow_stubs-5.0.dist-info/RECORD +137 -0
  135. metaflow-stubs/plugins/airflow/airflow.pyi +0 -187
  136. metaflow-stubs/plugins/airflow/airflow_cli.pyi +0 -90
  137. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +0 -50
  138. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +0 -46
  139. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +0 -145
  140. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +0 -22
  141. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +0 -159
  142. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +0 -49
  143. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +0 -78
  144. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +0 -77
  145. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +0 -11
  146. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +0 -114
  147. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +0 -75
  148. ob_metaflow_stubs-4.9.dist-info/RECORD +0 -150
  149. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.0.dist-info}/WHEEL +0 -0
  150. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.0.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,7 +1,7 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.10.1+ob(v1) #
-# Generated on 2024-07-31T05:43:20.962506 #
+# MF version: 2.12.10.2+ob(v1) #
+# Generated on 2024-08-06T17:10:53.103007 #
 ##################################################################################
 
 from __future__ import annotations
@@ -10,16 +10,16 @@ import typing
 if typing.TYPE_CHECKING:
     import metaflow.events
     import metaflow.flowspec
+    import metaflow.parameters
     import metaflow.metaflow_current
-    import metaflow._vendor.click.types
-    import metaflow.plugins.datatools.s3.s3
+    import io
     import metaflow.client.core
     import metaflow.datastore.inputs
-    import datetime
     import metaflow.runner.metaflow_runner
     import typing
-    import metaflow.parameters
-    import io
+    import metaflow._vendor.click.types
+    import metaflow.plugins.datatools.s3.s3
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -727,6 +727,63 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...
 
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
 @typing.overload
 def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -759,59 +816,51 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     ...
 
 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies the PyPI packages for the step.
 
     Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-    packages : Dict[str, str], default {}
+    packages : Dict[str, str], default: {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
+    python : str, optional, default: None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
     """
     ...
 
 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the Conda environment for the step.
+    Specifies the PyPI packages for the step.
 
     Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-    packages : Dict[str, str], default {}
+    packages : Dict[str, str], default: {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
+    python : str, optional, default: None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
     """
     ...
 
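A similar sketch for the `@pypi` overloads that replace `@conda` at this position. Note that `@pyi_base` in the docstring is a typo carried over from upstream; the actual flow-level decorator is `@pypi_base`. Package names and versions below are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.10.4", packages={"requests": "2.32.3"})
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"numpy": "1.26.4"})  # step-specific override of the flow-level packages
    @step
    def start(self):
        import numpy as np  # installed into the step's isolated environment
        self.total = int(np.arange(5).sum())
        self.next(self.end)

    @step
    def end(self):
        print("total:", self.total)

if __name__ == "__main__":
    PypiDemoFlow()
```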
@@ -866,6 +915,59 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...
 
+@typing.overload
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+
+    Parameters
+    ----------
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+
+    Parameters
+    ----------
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
 @typing.overload
 def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -923,6 +1025,83 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...
 
+@typing.overload
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+    """
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
 @typing.overload
 def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -972,114 +1151,9 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
-@typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
+    Specifies that this step should execute on Kubernetes.
 
     Parameters
     ----------
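
A sketch of the repositioned `@kubernetes` decorator using a few keyword arguments from the signature above (values are illustrative; string defaults such as "METAFLOW_KUBERNETES_SERVICE_ACCOUNT" are resolved from Metaflow configuration at run time):

```python
from metaflow import FlowSpec, kubernetes, step

class K8sDemoFlow(FlowSpec):

    # signature defaults above: cpu=1, memory=4096 (MB), disk=10240 (MB)
    @kubernetes(cpu=2, memory=8192, disk=20480, use_tmpfs=True)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```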
@@ -1137,275 +1211,54 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     ...
 
 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    ...
-
-@typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
 @typing.overload
-def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
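A sketch of the relocated `@secrets` decorator; the secret source id and the injected environment variable name below are hypothetical and depend on the configured secrets provider:

```python
import os

from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["my-database-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        # the provider injects secret values as environment variables
        self.user = os.environ.get("DB_USER")  # variable name depends on the secret
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```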
@@ -1440,63 +1293,6 @@ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[Fl
     """
     ...
 
-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-
-    Parameters
-    ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
-
-
-    """
-    ...
-
-@typing.overload
-def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
 def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
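
The `project` and `pypi_base` stubs removed at this position are flow-level decorators; per the docstrings above, usage looks like the following sketch (project and package names are illustrative):

```python
from metaflow import FlowSpec, project, pypi_base, step

@project(name="demo_project")  # lowercase alphanumerics and underscores only
@pypi_base(python="3.10.4", packages={"requests": "2.32.3"})
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectDemoFlow()
```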
@@ -1539,6 +1335,48 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1539
1335
  """
1540
1336
  ...
 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator; adding more than one ensures that the `start` step begins only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time, in seconds, that the job should wait between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
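As a sketch of how the decorator above might be used (the upstream DAG and task ids are hypothetical, and the omitted keyword arguments are assumed to fall back to the documented defaults):

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Start only after the (hypothetical) upstream Airflow DAG's task succeeds.
# Takes effect when the flow is compiled with `airflow create`.
@airflow_external_task_sensor(
    name="wait_for_nightly_etl",
    external_dag_id="nightly_etl",        # hypothetical upstream DAG
    external_task_ids=["publish_table"],  # hypothetical upstream task
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```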
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1589,44 +1427,48 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
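The docstring body is elided by the hunk above, but from the signature a minimal sketch of the decorator would look like this. The cron expression is illustrative, and the schedule is assumed to be honored only once the flow is deployed to a production scheduler:

```python
from metaflow import FlowSpec, schedule, step

# Run every day at 04:00; a `cron` expression takes the place of the
# hourly/daily/weekly shortcuts.
@schedule(cron="0 4 * * *")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```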
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1635,47 +1477,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1683,45 +1529,60 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
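Putting the docstring above into practice, a minimal sketch of a downstream flow (the upstream flow name `FooFlow` is hypothetical, and the trigger is assumed to fire only once both flows are deployed to a production scheduler such as Argo Workflows):

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Starts whenever a run of the (hypothetical) upstream FooFlow in the
# same project/branch namespace completes successfully.
@trigger_on_finish(flow="FooFlow")
class ReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()
```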
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator; adding more than one ensures that the `start` step begins only after all sensors finish.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
  Parameters
  ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time, in seconds, that the job should wait between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
  name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
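A minimal sketch of the decorator in use (the project name is hypothetical); flows deployed under the same name share one project namespace, and branches such as `prod` or `user.bob` are derived at deploy time:

```python
from metaflow import FlowSpec, project, step

# Both this flow and any other flow decorated with
# @project(name="my_project") share one project namespace.
@project(name="my_project")
class FirstFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    FirstFlow()
```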
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
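A minimal sketch of the flow-level decorator (the package and Python versions below are illustrative); individual steps could still override these pins with `@pypi`:

```python
from metaflow import FlowSpec, pypi_base, step

# Pin flow-wide dependencies; the versions here are illustrative only.
@pypi_base(packages={"pandas": "2.1.4"}, python="3.10.13")
class PyPIFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PyPIFlow()
```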
 
@@ -1775,48 +1636,44 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  ...
 
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1825,51 +1682,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...
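To make the event-to-parameter mapping above concrete, here is a rough sketch (the event name `data_updated` and its `path` field are hypothetical, and the trigger is assumed to be honored only once the flow is deployed to a production scheduler):

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Fire on the (hypothetical) event 'data_updated', mapping the event's
# 'path' field onto the flow parameter 'input_path'.
@trigger(event={"name": "data_updated", "parameters": {"input_path": "path"}})
class EventFlow(FlowSpec):
    input_path = Parameter("input_path", default="s3://example-bucket/default.csv")

    @step
    def start(self):
        print("triggered with input:", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventFlow()
```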
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.