metaflow-stubs 2.11.8__py2.py3-none-any.whl → 2.11.9__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. metaflow-stubs/__init__.pyi +393 -393
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +18 -18
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  61. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  63. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  64. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  81. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  82. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  83. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  89. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  90. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  91. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  95. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  106. metaflow-stubs/plugins/package_cli.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/procpoll.pyi +2 -2
  125. metaflow-stubs/pylint_wrapper.pyi +2 -2
  126. metaflow-stubs/tagging_util.pyi +2 -2
  127. metaflow-stubs/version.pyi +2 -2
  128. {metaflow_stubs-2.11.8.dist-info → metaflow_stubs-2.11.9.dist-info}/METADATA +2 -2
  129. metaflow_stubs-2.11.9.dist-info/RECORD +132 -0
  130. metaflow_stubs-2.11.8.dist-info/RECORD +0 -132
  131. {metaflow_stubs-2.11.8.dist-info → metaflow_stubs-2.11.9.dist-info}/WHEEL +0 -0
  132. {metaflow_stubs-2.11.8.dist-info → metaflow_stubs-2.11.9.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.8 #
- # Generated on 2024-03-29T12:37:59.184463 #
+ # MF version: 2.11.9 #
+ # Generated on 2024-03-29T22:28:00.911417 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.events
- import metaflow.plugins.datatools.s3.s3
- import metaflow._vendor.click.types
- import metaflow.client.core
  import metaflow.datastore.inputs
+ import metaflow.plugins.datatools.s3.s3
+ import metaflow.metaflow_current
  import typing
+ import io
+ import metaflow.events
  import metaflow.parameters
  import datetime
- import io
- import metaflow.metaflow_current
+ import metaflow.client.core
+ import metaflow._vendor.click.types
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -725,6 +725,57 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
  @typing.overload
  def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -757,55 +808,35 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

@@ -886,55 +917,6 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1083,192 +1065,153 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the PyPI packages for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the PyPI packages for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

@@ -1387,103 +1330,75 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
+ Specifies a timeout for your step.

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
+ This decorator is useful if this step may hang indefinitely.

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies a timeout for your step.

- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
+ This decorator is useful if this step may hang indefinitely.

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.


  """
@@ -1623,62 +1538,13 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.

  Parameters
  ----------
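The docstring above describes the flow-level `@airflow_s3_key_sensor` decorator, which only takes effect when the flow is compiled with `airflow create`. A minimal sketch passing only a few arguments (bucket, key, and names are assumptions; the remaining arguments fall back to the defaults documented in the parameter list, even though the stub signature lists them without defaults):

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(                       # waits for an S3 key before `start` runs on Airflow
    bucket_name="my-bucket",                  # assumed bucket
    bucket_key="incoming/data.csv",           # assumed key to wait for
    name="wait_for_data",
    description="Block start until the input file lands in S3",
)
class SensorFlow(FlowSpec):                   # hypothetical flow name
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorFlow()
```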
@@ -1714,48 +1580,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1805,21 +1629,197 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
+ Specifies the flow(s) that this flow depends on.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
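The final hunk moves the flow-level `@trigger_on_finish`, `@schedule`, and `@airflow_external_task_sensor` stubs to this position without changing what they document. A minimal sketch combining the first two (upstream and flow names are assumptions):

```python
from metaflow import FlowSpec, schedule, step, trigger_on_finish

@schedule(daily=True)                        # run once a day on the production scheduler
@trigger_on_finish(flow="UpstreamFlow")      # assumed upstream flow name
class DownstreamFlow(FlowSpec):              # hypothetical flow name
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```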