metaflow-stubs 2.12.11__py2.py3-none-any.whl → 2.12.12__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +450 -450
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +22 -22
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  67. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  112. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  130. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +3 -3
  135. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  136. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  137. metaflow-stubs/runner/nbrun.pyi +2 -2
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +2 -2
  140. metaflow-stubs/system/__init__.pyi +4 -4
  141. metaflow-stubs/system/system_logger.pyi +3 -3
  142. metaflow-stubs/system/system_monitor.pyi +3 -3
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.11.dist-info → metaflow_stubs-2.12.12.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.12.dist-info/RECORD +150 -0
  148. {metaflow_stubs-2.12.11.dist-info → metaflow_stubs-2.12.12.dist-info}/WHEEL +1 -1
  149. metaflow_stubs-2.12.11.dist-info/RECORD +0 -150
  150. {metaflow_stubs-2.12.11.dist-info → metaflow_stubs-2.12.12.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.11 #
-# Generated on 2024-08-09T01:05:47.258349 #
+# MF version: 2.12.12 #
+# Generated on 2024-08-13T23:49:26.963512 #
 ##################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import io
-    import metaflow.metaflow_current
-    import typing
-    import metaflow.flowspec
     import metaflow.runner.metaflow_runner
-    import metaflow.datastore.inputs
-    import metaflow._vendor.click.types
+    import metaflow.plugins.datatools.s3.s3
+    import io
+    import metaflow.client.core
     import metaflow.events
     import metaflow.parameters
-    import metaflow.plugins.datatools.s3.s3
+    import typing
+    import metaflow.metaflow_current
     import datetime
-    import metaflow.client.core
+    import metaflow.datastore.inputs
+    import metaflow._vendor.click.types
+    import metaflow.flowspec
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
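The only changes in this hunk are the version/date banner and the order of the `typing.TYPE_CHECKING` imports. For readers unfamiliar with the pattern, here is a minimal illustrative sketch of why stub files guard imports this way; the `generated_at` function is hypothetical and not part of the package:

```python
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # These imports are only evaluated by type checkers, never at runtime,
    # which keeps the stub cheap to load and free of import cycles.
    import datetime


def generated_at() -> "datetime.datetime":
    # Illustrative stub-style signature; the forward reference is resolved
    # lazily by the type checker.
    ...
```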
@@ -727,147 +727,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
727
727
  """
728
728
  ...
729
729
 
730
- @typing.overload
731
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
- """
733
- Specifies environment variables to be set prior to the execution of a step.
734
-
735
- Parameters
736
- ----------
737
- vars : Dict[str, str], default {}
738
- Dictionary of environment variables to set.
739
- """
740
- ...
741
-
742
- @typing.overload
743
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
744
- ...
745
-
746
- @typing.overload
747
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
748
- ...
749
-
750
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
751
- """
752
- Specifies environment variables to be set prior to the execution of a step.
753
-
754
- Parameters
755
- ----------
756
- vars : Dict[str, str], default {}
757
- Dictionary of environment variables to set.
758
- """
759
- ...
760
-
761
- @typing.overload
762
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
763
- """
764
- Specifies the Conda environment for the step.
765
-
766
- Information in this decorator will augment any
767
- attributes set in the `@conda_base` flow-level decorator. Hence,
768
- you can use `@conda_base` to set packages required by all
769
- steps and use `@conda` to specify step-specific overrides.
770
-
771
- Parameters
772
- ----------
773
- packages : Dict[str, str], default {}
774
- Packages to use for this step. The key is the name of the package
775
- and the value is the version to use.
776
- libraries : Dict[str, str], default {}
777
- Supported for backward compatibility. When used with packages, packages will take precedence.
778
- python : str, optional, default None
779
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
780
- that the version used will correspond to the version of the Python interpreter used to start the run.
781
- disabled : bool, default False
782
- If set to True, disables @conda.
783
- """
784
- ...
785
-
786
- @typing.overload
787
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
788
- ...
789
-
790
- @typing.overload
791
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
792
- ...
793
-
794
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
795
- """
796
- Specifies the Conda environment for the step.
797
-
798
- Information in this decorator will augment any
799
- attributes set in the `@conda_base` flow-level decorator. Hence,
800
- you can use `@conda_base` to set packages required by all
801
- steps and use `@conda` to specify step-specific overrides.
802
-
803
- Parameters
804
- ----------
805
- packages : Dict[str, str], default {}
806
- Packages to use for this step. The key is the name of the package
807
- and the value is the version to use.
808
- libraries : Dict[str, str], default {}
809
- Supported for backward compatibility. When used with packages, packages will take precedence.
810
- python : str, optional, default None
811
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
812
- that the version used will correspond to the version of the Python interpreter used to start the run.
813
- disabled : bool, default False
814
- If set to True, disables @conda.
815
- """
816
- ...
817
-
818
- @typing.overload
819
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
820
- """
821
- Specifies the number of times the task corresponding
822
- to a step needs to be retried.
823
-
824
- This decorator is useful for handling transient errors, such as networking issues.
825
- If your task contains operations that can't be retried safely, e.g. database updates,
826
- it is advisable to annotate it with `@retry(times=0)`.
827
-
828
- This can be used in conjunction with the `@catch` decorator. The `@catch`
829
- decorator will execute a no-op task after all retries have been exhausted,
830
- ensuring that the flow execution can continue.
831
-
832
- Parameters
833
- ----------
834
- times : int, default 3
835
- Number of times to retry this task.
836
- minutes_between_retries : int, default 2
837
- Number of minutes between retries.
838
- """
839
- ...
840
-
841
- @typing.overload
842
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
843
- ...
844
-
845
- @typing.overload
846
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
847
- ...
848
-
849
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
850
- """
851
- Specifies the number of times the task corresponding
852
- to a step needs to be retried.
853
-
854
- This decorator is useful for handling transient errors, such as networking issues.
855
- If your task contains operations that can't be retried safely, e.g. database updates,
856
- it is advisable to annotate it with `@retry(times=0)`.
857
-
858
- This can be used in conjunction with the `@catch` decorator. The `@catch`
859
- decorator will execute a no-op task after all retries have been exhausted,
860
- ensuring that the flow execution can continue.
861
-
862
- Parameters
863
- ----------
864
- times : int, default 3
865
- Number of times to retry this task.
866
- minutes_between_retries : int, default 2
867
- Number of minutes between retries.
868
- """
869
- ...
870
-
871
730
  @typing.overload
872
731
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
873
732
  """
@@ -1016,59 +875,92 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1016
875
  ...
1017
876
 
1018
877
  @typing.overload
1019
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
878
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1020
879
  """
1021
- Specifies a timeout for your step.
1022
-
1023
- This decorator is useful if this step may hang indefinitely.
1024
-
1025
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1026
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1027
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
880
+ Specifies the Conda environment for the step.
1028
881
 
1029
- Note that all the values specified in parameters are added together so if you specify
1030
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
882
+ Information in this decorator will augment any
883
+ attributes set in the `@conda_base` flow-level decorator. Hence,
884
+ you can use `@conda_base` to set packages required by all
885
+ steps and use `@conda` to specify step-specific overrides.
1031
886
 
1032
887
  Parameters
1033
888
  ----------
1034
- seconds : int, default 0
1035
- Number of seconds to wait prior to timing out.
1036
- minutes : int, default 0
1037
- Number of minutes to wait prior to timing out.
1038
- hours : int, default 0
1039
- Number of hours to wait prior to timing out.
889
+ packages : Dict[str, str], default {}
890
+ Packages to use for this step. The key is the name of the package
891
+ and the value is the version to use.
892
+ libraries : Dict[str, str], default {}
893
+ Supported for backward compatibility. When used with packages, packages will take precedence.
894
+ python : str, optional, default None
895
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
896
+ that the version used will correspond to the version of the Python interpreter used to start the run.
897
+ disabled : bool, default False
898
+ If set to True, disables @conda.
1040
899
  """
1041
900
  ...
1042
901
 
1043
902
  @typing.overload
1044
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
903
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1045
904
  ...
1046
905
 
1047
906
  @typing.overload
1048
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
907
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1049
908
  ...
1050
909
 
1051
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
910
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1052
911
  """
1053
- Specifies a timeout for your step.
912
+ Specifies the Conda environment for the step.
1054
913
 
1055
- This decorator is useful if this step may hang indefinitely.
914
+ Information in this decorator will augment any
915
+ attributes set in the `@conda_base` flow-level decorator. Hence,
916
+ you can use `@conda_base` to set packages required by all
917
+ steps and use `@conda` to specify step-specific overrides.
1056
918
 
1057
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1058
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1059
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
919
+ Parameters
920
+ ----------
921
+ packages : Dict[str, str], default {}
922
+ Packages to use for this step. The key is the name of the package
923
+ and the value is the version to use.
924
+ libraries : Dict[str, str], default {}
925
+ Supported for backward compatibility. When used with packages, packages will take precedence.
926
+ python : str, optional, default None
927
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
928
+ that the version used will correspond to the version of the Python interpreter used to start the run.
929
+ disabled : bool, default False
930
+ If set to True, disables @conda.
931
+ """
932
+ ...
933
+
934
+ @typing.overload
935
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
936
+ """
937
+ Specifies secrets to be retrieved and injected as environment variables prior to
938
+ the execution of a step.
1060
939
 
1061
- Note that all the values specified in parameters are added together so if you specify
1062
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
940
+ Parameters
941
+ ----------
942
+ sources : List[Union[str, Dict[str, Any]]], default: []
943
+ List of secret specs, defining how the secrets are to be retrieved
944
+ """
945
+ ...
946
+
947
+ @typing.overload
948
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
949
+ ...
950
+
951
+ @typing.overload
952
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
953
+ ...
954
+
955
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
956
+ """
957
+ Specifies secrets to be retrieved and injected as environment variables prior to
958
+ the execution of a step.
1063
959
 
1064
960
  Parameters
1065
961
  ----------
1066
- seconds : int, default 0
1067
- Number of seconds to wait prior to timing out.
1068
- minutes : int, default 0
1069
- Number of minutes to wait prior to timing out.
1070
- hours : int, default 0
1071
- Number of hours to wait prior to timing out.
962
+ sources : List[Union[str, Dict[str, Any]]], default: []
963
+ List of secret specs, defining how the secrets are to be retrieved
1072
964
  """
1073
965
  ...
1074
966
 
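This hunk re-adds the `@conda` and `@secrets` stubs at a new position; their documented behavior is unchanged. As a rough usage sketch of the two step decorators described above, with an illustrative flow name, package pin, Python version, and secret source:

```python
from metaflow import FlowSpec, conda, secrets, step


class CondaSecretsFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # secret name is illustrative
    @conda(packages={"pandas": "2.2.2"}, python="3.11.9")
    @step
    def start(self):
        import os

        import pandas as pd  # resolvable because @conda pins pandas for this step
        # @secrets injects the retrieved values as environment variables.
        print(pd.__version__, len(os.environ))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaSecretsFlow()
```

Running a flow that uses `@conda` typically requires selecting the Conda environment on the command line, e.g. `--environment=conda`.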
@@ -1150,7 +1042,167 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1150
1042
  ...
1151
1043
 
1152
1044
  @typing.overload
1153
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1045
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1046
+ """
1047
+ Specifies a timeout for your step.
1048
+
1049
+ This decorator is useful if this step may hang indefinitely.
1050
+
1051
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1052
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1053
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1054
+
1055
+ Note that all the values specified in parameters are added together so if you specify
1056
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1057
+
1058
+ Parameters
1059
+ ----------
1060
+ seconds : int, default 0
1061
+ Number of seconds to wait prior to timing out.
1062
+ minutes : int, default 0
1063
+ Number of minutes to wait prior to timing out.
1064
+ hours : int, default 0
1065
+ Number of hours to wait prior to timing out.
1066
+ """
1067
+ ...
1068
+
1069
+ @typing.overload
1070
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1071
+ ...
1072
+
1073
+ @typing.overload
1074
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1075
+ ...
1076
+
1077
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1078
+ """
1079
+ Specifies a timeout for your step.
1080
+
1081
+ This decorator is useful if this step may hang indefinitely.
1082
+
1083
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1084
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1085
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1086
+
1087
+ Note that all the values specified in parameters are added together so if you specify
1088
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1089
+
1090
+ Parameters
1091
+ ----------
1092
+ seconds : int, default 0
1093
+ Number of seconds to wait prior to timing out.
1094
+ minutes : int, default 0
1095
+ Number of minutes to wait prior to timing out.
1096
+ hours : int, default 0
1097
+ Number of hours to wait prior to timing out.
1098
+ """
1099
+ ...
1100
+
1101
+ @typing.overload
1102
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1103
+ """
1104
+ Specifies environment variables to be set prior to the execution of a step.
1105
+
1106
+ Parameters
1107
+ ----------
1108
+ vars : Dict[str, str], default {}
1109
+ Dictionary of environment variables to set.
1110
+ """
1111
+ ...
1112
+
1113
+ @typing.overload
1114
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1115
+ ...
1116
+
1117
+ @typing.overload
1118
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1119
+ ...
1120
+
1121
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1122
+ """
1123
+ Specifies environment variables to be set prior to the execution of a step.
1124
+
1125
+ Parameters
1126
+ ----------
1127
+ vars : Dict[str, str], default {}
1128
+ Dictionary of environment variables to set.
1129
+ """
1130
+ ...
1131
+
1132
+ @typing.overload
1133
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1134
+ """
1135
+ Specifies the number of times the task corresponding
1136
+ to a step needs to be retried.
1137
+
1138
+ This decorator is useful for handling transient errors, such as networking issues.
1139
+ If your task contains operations that can't be retried safely, e.g. database updates,
1140
+ it is advisable to annotate it with `@retry(times=0)`.
1141
+
1142
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1143
+ decorator will execute a no-op task after all retries have been exhausted,
1144
+ ensuring that the flow execution can continue.
1145
+
1146
+ Parameters
1147
+ ----------
1148
+ times : int, default 3
1149
+ Number of times to retry this task.
1150
+ minutes_between_retries : int, default 2
1151
+ Number of minutes between retries.
1152
+ """
1153
+ ...
1154
+
1155
+ @typing.overload
1156
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1157
+ ...
1158
+
1159
+ @typing.overload
1160
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1161
+ ...
1162
+
1163
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1164
+ """
1165
+ Specifies the number of times the task corresponding
1166
+ to a step needs to be retried.
1167
+
1168
+ This decorator is useful for handling transient errors, such as networking issues.
1169
+ If your task contains operations that can't be retried safely, e.g. database updates,
1170
+ it is advisable to annotate it with `@retry(times=0)`.
1171
+
1172
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1173
+ decorator will execute a no-op task after all retries have been exhausted,
1174
+ ensuring that the flow execution can continue.
1175
+
1176
+ Parameters
1177
+ ----------
1178
+ times : int, default 3
1179
+ Number of times to retry this task.
1180
+ minutes_between_retries : int, default 2
1181
+ Number of minutes between retries.
1182
+ """
1183
+ ...
1184
+
1185
+ @typing.overload
1186
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1187
+ """
1188
+ Decorator prototype for all step decorators. This function gets specialized
1189
+ and imported for all decorators types by _import_plugin_decorators().
1190
+ """
1191
+ ...
1192
+
1193
+ @typing.overload
1194
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1195
+ ...
1196
+
1197
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1198
+ """
1199
+ Decorator prototype for all step decorators. This function gets specialized
1200
+ and imported for all decorators types by _import_plugin_decorators().
1201
+ """
1202
+ ...
1203
+
1204
+ @typing.overload
1205
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1154
1206
  """
1155
1207
  Specifies that the step will success under all circumstances.
1156
1208
 
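The `@timeout`, `@environment`, and `@retry` stubs re-added in this hunk describe how the failure-handling decorators compose with `@catch`. A minimal sketch of that composition, assuming the semantics described in the docstrings above; `RobustFlow`, `fetch_rows`, and the `MODE` variable are illustrative:

```python
from metaflow import FlowSpec, catch, environment, retry, step, timeout


class RobustFlow(FlowSpec):

    # Per the docstrings above: a timeout surfaces as an exception, @retry
    # re-runs the task, and @catch records the final failure so the flow
    # can continue instead of aborting.
    @catch(var="fetch_error", print_exception=True)
    @retry(times=2, minutes_between_retries=1)
    @timeout(minutes=30)
    @environment(vars={"MODE": "batch"})  # illustrative environment variable
    @step
    def start(self):
        self.rows = fetch_rows()  # hypothetical helper that may hang or fail
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "fetch_error", None):
            print("start ultimately failed:", self.fetch_error)


def fetch_rows():
    # Placeholder for a flaky remote call.
    return []


if __name__ == "__main__":
    RobustFlow()
```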
@@ -1249,6 +1301,55 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...
 
+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
 def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Specifies that this step should execute on Kubernetes.
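The relocated `@pypi` stub above describes per-step PyPI dependencies, and the trailing context shows the `@kubernetes` signature. A small sketch combining the two; the flow name, package pin, and resource values are illustrative:

```python
from metaflow import FlowSpec, kubernetes, pypi, step


class PypiOnK8sFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192)  # resource values are illustrative
    @pypi(packages={"scikit-learn": "1.5.1"}, python="3.11.9")
    @step
    def start(self):
        from sklearn.linear_model import LinearRegression

        self.model = LinearRegression()
        self.next(self.end)

    @step
    def end(self):
        print(type(self.model).__name__)


if __name__ == "__main__":
    PypiOnK8sFlow()
```

Executing a flow that uses `@pypi` typically requires selecting the pypi environment, e.g. `--environment=pypi` on the run command.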
@@ -1309,103 +1410,105 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1309
1410
  ...
1310
1411
 
1311
1412
  @typing.overload
1312
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1413
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1313
1414
  """
1314
- Specifies secrets to be retrieved and injected as environment variables prior to
1315
- the execution of a step.
1415
+ Specifies the flow(s) that this flow depends on.
1416
+
1417
+ ```
1418
+ @trigger_on_finish(flow='FooFlow')
1419
+ ```
1420
+ or
1421
+ ```
1422
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1423
+ ```
1424
+ This decorator respects the @project decorator and triggers the flow
1425
+ when upstream runs within the same namespace complete successfully
1426
+
1427
+ Additionally, you can specify project aware upstream flow dependencies
1428
+ by specifying the fully qualified project_flow_name.
1429
+ ```
1430
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1431
+ ```
1432
+ or
1433
+ ```
1434
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1435
+ ```
1436
+
1437
+ You can also specify just the project or project branch (other values will be
1438
+ inferred from the current project or project branch):
1439
+ ```
1440
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1441
+ ```
1442
+
1443
+ Note that `branch` is typically one of:
1444
+ - `prod`
1445
+ - `user.bob`
1446
+ - `test.my_experiment`
1447
+ - `prod.staging`
1316
1448
 
1317
1449
  Parameters
1318
1450
  ----------
1319
- sources : List[Union[str, Dict[str, Any]]], default: []
1320
- List of secret specs, defining how the secrets are to be retrieved
1451
+ flow : Union[str, Dict[str, str]], optional, default None
1452
+ Upstream flow dependency for this flow.
1453
+ flows : List[Union[str, Dict[str, str]]], default []
1454
+ Upstream flow dependencies for this flow.
1455
+ options : Dict[str, Any], default {}
1456
+ Backend-specific configuration for tuning eventing behavior.
1457
+
1458
+
1321
1459
  """
1322
1460
  ...
1323
1461
 
1324
1462
  @typing.overload
1325
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1326
- ...
1327
-
1328
- @typing.overload
1329
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1463
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1330
1464
  ...
1331
1465
 
1332
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1466
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1333
1467
  """
1334
- Specifies secrets to be retrieved and injected as environment variables prior to
1335
- the execution of a step.
1468
+ Specifies the flow(s) that this flow depends on.
1336
1469
 
1337
- Parameters
1338
- ----------
1339
- sources : List[Union[str, Dict[str, Any]]], default: []
1340
- List of secret specs, defining how the secrets are to be retrieved
1341
- """
1342
- ...
1343
-
1344
- @typing.overload
1345
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1346
- """
1347
- Decorator prototype for all step decorators. This function gets specialized
1348
- and imported for all decorators types by _import_plugin_decorators().
1349
- """
1350
- ...
1351
-
1352
- @typing.overload
1353
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1354
- ...
1355
-
1356
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1357
- """
1358
- Decorator prototype for all step decorators. This function gets specialized
1359
- and imported for all decorators types by _import_plugin_decorators().
1360
- """
1361
- ...
1362
-
1363
- @typing.overload
1364
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1365
- """
1366
- Specifies the PyPI packages for the step.
1470
+ ```
1471
+ @trigger_on_finish(flow='FooFlow')
1472
+ ```
1473
+ or
1474
+ ```
1475
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1476
+ ```
1477
+ This decorator respects the @project decorator and triggers the flow
1478
+ when upstream runs within the same namespace complete successfully
1367
1479
 
1368
- Information in this decorator will augment any
1369
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1370
- you can use `@pypi_base` to set packages required by all
1371
- steps and use `@pypi` to specify step-specific overrides.
1480
+ Additionally, you can specify project aware upstream flow dependencies
1481
+ by specifying the fully qualified project_flow_name.
1482
+ ```
1483
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1484
+ ```
1485
+ or
1486
+ ```
1487
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1488
+ ```
1372
1489
 
1373
- Parameters
1374
- ----------
1375
- packages : Dict[str, str], default: {}
1376
- Packages to use for this step. The key is the name of the package
1377
- and the value is the version to use.
1378
- python : str, optional, default: None
1379
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1380
- that the version used will correspond to the version of the Python interpreter used to start the run.
1381
- """
1382
- ...
1383
-
1384
- @typing.overload
1385
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1386
- ...
1387
-
1388
- @typing.overload
1389
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1390
- ...
1391
-
1392
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1393
- """
1394
- Specifies the PyPI packages for the step.
1490
+ You can also specify just the project or project branch (other values will be
1491
+ inferred from the current project or project branch):
1492
+ ```
1493
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1494
+ ```
1395
1495
 
1396
- Information in this decorator will augment any
1397
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1398
- you can use `@pypi_base` to set packages required by all
1399
- steps and use `@pypi` to specify step-specific overrides.
1496
+ Note that `branch` is typically one of:
1497
+ - `prod`
1498
+ - `user.bob`
1499
+ - `test.my_experiment`
1500
+ - `prod.staging`
1400
1501
 
1401
1502
  Parameters
1402
1503
  ----------
1403
- packages : Dict[str, str], default: {}
1404
- Packages to use for this step. The key is the name of the package
1405
- and the value is the version to use.
1406
- python : str, optional, default: None
1407
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1408
- that the version used will correspond to the version of the Python interpreter used to start the run.
1504
+ flow : Union[str, Dict[str, str]], optional, default None
1505
+ Upstream flow dependency for this flow.
1506
+ flows : List[Union[str, Dict[str, str]]], default []
1507
+ Upstream flow dependencies for this flow.
1508
+ options : Dict[str, Any], default {}
1509
+ Backend-specific configuration for tuning eventing behavior.
1510
+
1511
+
1409
1512
  """
1410
1513
  ...
1411
1514
 
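`@trigger_on_finish`, added in this hunk, is a flow-level decorator, so it is applied to the class rather than to a step. A minimal sketch matching the docstring's `flow='FooFlow'` example; the upstream and downstream flow names are illustrative:

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Flow-level decorator: when this flow is deployed to a production
# orchestrator, it starts whenever the named upstream flow (in the same
# namespace/project) finishes successfully.
@trigger_on_finish(flow="FooFlow")  # upstream flow name is illustrative
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```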
@@ -1451,97 +1554,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1451
1554
  """
1452
1555
  ...
1453
1556
 
1454
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1455
- """
1456
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1457
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1458
-
1459
- Parameters
1460
- ----------
1461
- timeout : int
1462
- Time, in seconds before the task times out and fails. (Default: 3600)
1463
- poke_interval : int
1464
- Time in seconds that the job should wait in between each try. (Default: 60)
1465
- mode : str
1466
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1467
- exponential_backoff : bool
1468
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1469
- pool : str
1470
- the slot pool this task should run in,
1471
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1472
- soft_fail : bool
1473
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1474
- name : str
1475
- Name of the sensor on Airflow
1476
- description : str
1477
- Description of sensor in the Airflow UI
1478
- external_dag_id : str
1479
- The dag_id that contains the task you want to wait for.
1480
- external_task_ids : List[str]
1481
- The list of task_ids that you want to wait for.
1482
- If None (default value) the sensor waits for the DAG. (Default: None)
1483
- allowed_states : List[str]
1484
- Iterable of allowed states, (Default: ['success'])
1485
- failed_states : List[str]
1486
- Iterable of failed or dis-allowed states. (Default: None)
1487
- execution_delta : datetime.timedelta
1488
- time difference with the previous execution to look at,
1489
- the default is the same logical date as the current task or DAG. (Default: None)
1490
- check_existence: bool
1491
- Set to True to check if the external task exists or check if
1492
- the DAG to wait for exists. (Default: True)
1493
- """
1494
- ...
1495
-
1496
- @typing.overload
1497
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1498
- """
1499
- Specifies the times when the flow should be run when running on a
1500
- production scheduler.
1501
-
1502
- Parameters
1503
- ----------
1504
- hourly : bool, default False
1505
- Run the workflow hourly.
1506
- daily : bool, default True
1507
- Run the workflow daily.
1508
- weekly : bool, default False
1509
- Run the workflow weekly.
1510
- cron : str, optional, default None
1511
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1512
- specified by this expression.
1513
- timezone : str, optional, default None
1514
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1515
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1516
- """
1517
- ...
1518
-
1519
- @typing.overload
1520
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1521
- ...
1522
-
1523
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1524
- """
1525
- Specifies the times when the flow should be run when running on a
1526
- production scheduler.
1527
-
1528
- Parameters
1529
- ----------
1530
- hourly : bool, default False
1531
- Run the workflow hourly.
1532
- daily : bool, default True
1533
- Run the workflow daily.
1534
- weekly : bool, default False
1535
- Run the workflow weekly.
1536
- cron : str, optional, default None
1537
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1538
- specified by this expression.
1539
- timezone : str, optional, default None
1540
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1541
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1542
- """
1543
- ...
1544
-
1545
1557
  @typing.overload
1546
1558
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1547
1559
  """
@@ -1591,106 +1603,45 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1591
1603
  """
1592
1604
  ...
1593
1605
 
1594
- @typing.overload
1595
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1596
- """
1597
- Specifies the flow(s) that this flow depends on.
1598
-
1599
- ```
1600
- @trigger_on_finish(flow='FooFlow')
1601
- ```
1602
- or
1603
- ```
1604
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1605
- ```
1606
- This decorator respects the @project decorator and triggers the flow
1607
- when upstream runs within the same namespace complete successfully
1608
-
1609
- Additionally, you can specify project aware upstream flow dependencies
1610
- by specifying the fully qualified project_flow_name.
1611
- ```
1612
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1613
- ```
1614
- or
1615
- ```
1616
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1617
- ```
1618
-
1619
- You can also specify just the project or project branch (other values will be
1620
- inferred from the current project or project branch):
1621
- ```
1622
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1623
- ```
1624
-
1625
- Note that `branch` is typically one of:
1626
- - `prod`
1627
- - `user.bob`
1628
- - `test.my_experiment`
1629
- - `prod.staging`
1630
-
1631
- Parameters
1632
- ----------
1633
- flow : Union[str, Dict[str, str]], optional, default None
1634
- Upstream flow dependency for this flow.
1635
- flows : List[Union[str, Dict[str, str]]], default []
1636
- Upstream flow dependencies for this flow.
1637
- options : Dict[str, Any], default {}
1638
- Backend-specific configuration for tuning eventing behavior.
1639
-
1640
-
1641
- """
1642
- ...
1643
-
1644
- @typing.overload
1645
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1646
- ...
1647
-
1648
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1606
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1649
1607
  """
1650
- Specifies the flow(s) that this flow depends on.
1651
-
1652
- ```
1653
- @trigger_on_finish(flow='FooFlow')
1654
- ```
1655
- or
1656
- ```
1657
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1658
- ```
1659
- This decorator respects the @project decorator and triggers the flow
1660
- when upstream runs within the same namespace complete successfully
1661
-
1662
- Additionally, you can specify project aware upstream flow dependencies
1663
- by specifying the fully qualified project_flow_name.
1664
- ```
1665
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1666
- ```
1667
- or
1668
- ```
1669
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1670
- ```
1671
-
1672
- You can also specify just the project or project branch (other values will be
1673
- inferred from the current project or project branch):
1674
- ```
1675
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1676
- ```
1677
-
1678
- Note that `branch` is typically one of:
1679
- - `prod`
1680
- - `user.bob`
1681
- - `test.my_experiment`
1682
- - `prod.staging`
1608
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1609
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1683
1610
 
1684
1611
  Parameters
1685
1612
  ----------
1686
- flow : Union[str, Dict[str, str]], optional, default None
1687
- Upstream flow dependency for this flow.
1688
- flows : List[Union[str, Dict[str, str]]], default []
1689
- Upstream flow dependencies for this flow.
1690
- options : Dict[str, Any], default {}
1691
- Backend-specific configuration for tuning eventing behavior.
1692
-
1693
-
1613
+ timeout : int
1614
+ Time, in seconds before the task times out and fails. (Default: 3600)
1615
+ poke_interval : int
1616
+ Time in seconds that the job should wait in between each try. (Default: 60)
1617
+ mode : str
1618
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1619
+ exponential_backoff : bool
1620
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1621
+ pool : str
1622
+ the slot pool this task should run in,
1623
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1624
+ soft_fail : bool
1625
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1626
+ name : str
1627
+ Name of the sensor on Airflow
1628
+ description : str
1629
+ Description of sensor in the Airflow UI
1630
+ external_dag_id : str
1631
+ The dag_id that contains the task you want to wait for.
1632
+ external_task_ids : List[str]
1633
+ The list of task_ids that you want to wait for.
1634
+ If None (default value) the sensor waits for the DAG. (Default: None)
1635
+ allowed_states : List[str]
1636
+ Iterable of allowed states, (Default: ['success'])
1637
+ failed_states : List[str]
1638
+ Iterable of failed or dis-allowed states. (Default: None)
1639
+ execution_delta : datetime.timedelta
1640
+ time difference with the previous execution to look at,
1641
+ the default is the same logical date as the current task or DAG. (Default: None)
1642
+ check_existence: bool
1643
+ Set to True to check if the external task exists or check if
1644
+ the DAG to wait for exists. (Default: True)
1694
1645
  """
1695
1646
  ...
1696
1647
 
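The `@airflow_external_task_sensor` stub added above is likewise flow-level and only has an effect when the flow is compiled with `airflow create`. A hedged sketch, assuming the omitted arguments fall back to the defaults listed in the docstring; the DAG id, sensor name, and values shown are illustrative:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Attaches an Airflow ExternalTaskSensor in front of the start step when the
# flow is scheduled on Airflow; it does nothing for local runs.
@airflow_external_task_sensor(
    name="wait_for_upstream_dag",      # illustrative sensor name
    external_dag_id="upstream_etl_dag",  # illustrative upstream DAG
    timeout=3600,
    poke_interval=60,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorGatedFlow()
```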
@@ -1733,6 +1684,55 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1733
1684
  """
1734
1685
  ...
1735
1686
 
1687
+ @typing.overload
1688
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1689
+ """
1690
+ Specifies the times when the flow should be run when running on a
1691
+ production scheduler.
1692
+
1693
+ Parameters
1694
+ ----------
1695
+ hourly : bool, default False
1696
+ Run the workflow hourly.
1697
+ daily : bool, default True
1698
+ Run the workflow daily.
1699
+ weekly : bool, default False
1700
+ Run the workflow weekly.
1701
+ cron : str, optional, default None
1702
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1703
+ specified by this expression.
1704
+ timezone : str, optional, default None
1705
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1706
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1707
+ """
1708
+ ...
1709
+
1710
+ @typing.overload
1711
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1712
+ ...
1713
+
1714
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1715
+ """
1716
+ Specifies the times when the flow should be run when running on a
1717
+ production scheduler.
1718
+
1719
+ Parameters
1720
+ ----------
1721
+ hourly : bool, default False
1722
+ Run the workflow hourly.
1723
+ daily : bool, default True
1724
+ Run the workflow daily.
1725
+ weekly : bool, default False
1726
+ Run the workflow weekly.
1727
+ cron : str, optional, default None
1728
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1729
+ specified by this expression.
1730
+ timezone : str, optional, default None
1731
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1732
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1733
+ """
1734
+ ...
1735
+
1736
1736
  @typing.overload
1737
1737
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1738
1738
  """
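Finally, the relocated `@schedule` stub applies to the whole flow and only takes effect once the flow is deployed to a production scheduler (Argo Workflows, Step Functions, or Airflow). A minimal sketch; the flow name is illustrative and the decorator is a no-op for local runs:

```python
from metaflow import FlowSpec, schedule, step


@schedule(hourly=True)  # could also use daily/weekly or a cron expression
class HourlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HourlyFlow()
```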