ob-metaflow-stubs 4.1__py2.py3-none-any.whl → 4.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +430 -430
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +3 -3
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +2 -2
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  109. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  110. metaflow-stubs/plugins/package_cli.pyi +2 -2
  111. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  113. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  116. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  119. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  125. metaflow-stubs/plugins/tag_cli.pyi +3 -3
  126. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  128. metaflow-stubs/procpoll.pyi +2 -2
  129. metaflow-stubs/profilers/__init__.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +2 -2
  132. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  133. metaflow-stubs/runner/nbrun.pyi +2 -2
  134. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  135. metaflow-stubs/system/__init__.pyi +112 -0
  136. metaflow-stubs/system/system_logger.pyi +51 -0
  137. metaflow-stubs/system/system_monitor.pyi +73 -0
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. {ob_metaflow_stubs-4.1.dist-info → ob_metaflow_stubs-4.2.dist-info}/METADATA +1 -1
  141. ob_metaflow_stubs-4.2.dist-info/RECORD +144 -0
  142. metaflow-stubs/plugins/perimeters.pyi +0 -24
  143. ob_metaflow_stubs-4.1.dist-info/RECORD +0 -142
  144. {ob_metaflow_stubs-4.1.dist-info → ob_metaflow_stubs-4.2.dist-info}/WHEEL +0 -0
  145. {ob_metaflow_stubs-4.1.dist-info → ob_metaflow_stubs-4.2.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.6.1+ob(v1) #
- # Generated on 2024-07-03T18:20:45.077392 #
+ # MF version: 2.12.7.1+nim(0.0.1);ob(v1) #
+ # Generated on 2024-07-08T23:52:24.906750 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import metaflow.flowspec
- import metaflow.datastore.inputs
- import metaflow.events
  import metaflow._vendor.click.types
+ import metaflow.events
  import metaflow.metaflow_current
  import metaflow.parameters
- import typing
- import metaflow.runner.metaflow_runner
- import io
+ import metaflow.datastore.inputs
+ import datetime
+ import metaflow.flowspec
  import metaflow.client.core
  import metaflow.plugins.datatools.s3.s3
- import datetime
+ import metaflow.runner.metaflow_runner
+ import typing
+ import io
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -776,6 +776,63 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
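The hunk above adds step-level `@conda` overloads to the stubs. As a minimal usage sketch of the decorator documented above (the flow name, package, and version pins below are hypothetical, not taken from this diff):
```
from metaflow import FlowSpec, conda, step


class CondaStepFlow(FlowSpec):  # hypothetical flow name

    # Per-step Conda environment; augments any @conda_base set at the flow level.
    @conda(packages={"pandas": "2.1.0"}, python="3.10")  # hypothetical pins
    @step
    def start(self):
        import pandas as pd  # resolved from this step's Conda environment
        print("pandas", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaStepFlow()
```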
@@ -835,6 +892,59 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  """
  ...

+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
  @typing.overload
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -886,6 +996,116 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -1091,302 +1311,333 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the flow(s) that this flow depends on.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- You can choose the compute layer on the command line by executing e.g.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- python myflow.py run --with batch
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- python myflow.py run --with kubernetes
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the resources needed when executing this step.
+ Specifies the flow(s) that this flow depends on.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- You can choose the compute layer on the command line by executing e.g.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- python myflow.py run --with batch
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- python myflow.py run --with kubernetes
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
  """
  ...

- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

@@ -1432,45 +1683,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1520,158 +1732,6 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1520
1732
  """
1521
1733
  ...
1522
1734
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
-
-     Parameters
-     ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-     """
-     ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-     """
-     Specifies the times when the flow should be run when running on a
-     production scheduler.
-
-     Parameters
-     ----------
-     hourly : bool, default False
-         Run the workflow hourly.
-     daily : bool, default True
-         Run the workflow daily.
-     weekly : bool, default False
-         Run the workflow weekly.
-     cron : str, optional, default None
-         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-         specified by this expression.
-     timezone : str, optional, default None
-         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-     """
-     ...
-
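To make the removed `@schedule` stub concrete, here is a small sketch assuming deployment to a production scheduler; the flow name and cron expression are made up for illustration (the six-field expression follows the AWS cron format linked from the docstring).

```
from metaflow import FlowSpec, schedule, step

# Illustrative only: run nightly at 06:00 once deployed to a production
# scheduler (e.g. via `step-functions create` or `argo-workflows create`).
@schedule(cron="0 6 * * ? *")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```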
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies the flow(s) that this flow depends on.
-
-     ```
-     @trigger_on_finish(flow='FooFlow')
-     ```
-     or
-     ```
-     @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-     ```
-     This decorator respects the @project decorator and triggers the flow
-     when upstream runs within the same namespace complete successfully.
-
-     Additionally, you can specify project-aware upstream flow dependencies
-     by specifying the fully qualified project_flow_name.
-     ```
-     @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-     ```
-     or
-     ```
-     @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-     ```
-
-     You can also specify just the project or project branch (other values will be
-     inferred from the current project or project branch):
-     ```
-     @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-     ```
-
-     Note that `branch` is typically one of:
-     - `prod`
-     - `user.bob`
-     - `test.my_experiment`
-     - `prod.staging`
-
-     Parameters
-     ----------
-     flow : Union[str, Dict[str, str]], optional, default None
-         Upstream flow dependency for this flow.
-     flows : List[Union[str, Dict[str, str]]], default []
-         Upstream flow dependencies for this flow.
-     options : Dict[str, Any], default {}
-         Backend-specific configuration for tuning eventing behavior.
-     """
-     ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-     ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
-     """
-     Specifies the flow(s) that this flow depends on.
-
-     ```
-     @trigger_on_finish(flow='FooFlow')
-     ```
-     or
-     ```
-     @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-     ```
-     This decorator respects the @project decorator and triggers the flow
-     when upstream runs within the same namespace complete successfully.
-
-     Additionally, you can specify project-aware upstream flow dependencies
-     by specifying the fully qualified project_flow_name.
-     ```
-     @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-     ```
-     or
-     ```
-     @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-     ```
-
-     You can also specify just the project or project branch (other values will be
-     inferred from the current project or project branch):
-     ```
-     @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-     ```
-
-     Note that `branch` is typically one of:
-     - `prod`
-     - `user.bob`
-     - `test.my_experiment`
-     - `prod.staging`
-
-     Parameters
-     ----------
-     flow : Union[str, Dict[str, str]], optional, default None
-         Upstream flow dependency for this flow.
-     flows : List[Union[str, Dict[str, str]]], default []
-         Upstream flow dependencies for this flow.
-     options : Dict[str, Any], default {}
-         Backend-specific configuration for tuning eventing behavior.
-     """
-     ...
-
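As a companion to the removed `@trigger_on_finish` stub, a minimal downstream flow might look like the sketch below; `FooFlow` is the hypothetical upstream flow borrowed from the docstring's own examples.

```
from metaflow import FlowSpec, step, trigger_on_finish

# Sketch: start ReportFlow whenever FooFlow, deployed in the same
# namespace/project, finishes successfully on the production orchestrator.
@trigger_on_finish(flow="FooFlow")
class ReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()
```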
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
@@ -1767,66 +1827,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
      """
      ...
 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies what flows belong to the same project.
-
-     A project-specific namespace is created for all flows that
-     use the same `@project(name)`.
-
-     Parameters
-     ----------
-     name : str
-         Project name. Make sure that the name is unique amongst all
-         projects that use the same production scheduler. The name may
-         contain only lowercase alphanumeric characters and underscores.
-     """
-     ...
-
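The removed `@project` stub pairs naturally with a short example; the project name below is an assumption chosen to satisfy the lowercase-and-underscores rule stated in the docstring.

```
from metaflow import FlowSpec, project, step

# Sketch: every flow decorated with @project(name="fraud_model") shares one
# project namespace, so prod, test, and user branches can coexist.
@project(name="fraud_model")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```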
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-     added as a flow decorator. Adding more than one decorator ensures that the `start` step
-     starts only after all sensors finish.
-
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time, in seconds, that the job should wait between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in; slot pools are a way to limit
-         concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to true to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     bucket_key : Union[str, List[str]]
-         The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
-         When it is specified as a full s3:// URL, leave `bucket_name` as None.
-     bucket_name : str
-         Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
-         When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
-     wildcard_match : bool
-         Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-     aws_conn_id : str
-         A reference to the S3 connection on Airflow. (Default: None)
-     verify : bool
-         Whether or not to verify SSL certificates for the S3 connection. (Default: None)
-     """
-     ...
-
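Finally, a hedged sketch of how the removed `@airflow_s3_key_sensor` stub could be used; the bucket key and flow name are placeholders, and only a subset of the sensor's keyword arguments is shown, with the rest falling back to the defaults noted in the docstring when the flow is compiled with `airflow create`.

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Sketch: when compiled via `python flow.py airflow create`, the generated DAG
# waits for the placeholder marker object below before running `start`.
@airflow_s3_key_sensor(
    bucket_key="s3://my-bucket/incoming/_SUCCESS",  # hypothetical marker key
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
)
class IngestFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    IngestFlow()
```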
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
      """
      Switch namespace to the one provided.