metaflow-stubs 2.15.21__py2.py3-none-any.whl → 2.16.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (167)
  1. metaflow-stubs/__init__.pyi +626 -620
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +3 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -7
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +8 -8
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/{info_file.pyi → meta_files.pyi} +2 -6
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +6 -2
  20. metaflow-stubs/metaflow_current.pyi +17 -17
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +430 -0
  24. metaflow-stubs/packaging_sys/backend.pyi +73 -0
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +72 -0
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +53 -0
  27. metaflow-stubs/packaging_sys/utils.pyi +26 -0
  28. metaflow-stubs/packaging_sys/v1.pyi +145 -0
  29. metaflow-stubs/parameters.pyi +2 -2
  30. metaflow-stubs/plugins/__init__.pyi +12 -12
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -4
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +4 -4
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +5 -8
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -4
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +4 -4
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -2
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +29 -29
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +3 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -3
  153. metaflow-stubs/user_configs/config_options.pyi +3 -4
  154. metaflow-stubs/user_configs/config_parameters.pyi +5 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +15 -0
  156. metaflow-stubs/user_decorators/common.pyi +38 -0
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +223 -0
  158. metaflow-stubs/user_decorators/mutable_step.pyi +152 -0
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +137 -0
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +323 -0
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.15.21.dist-info → metaflow_stubs-2.16.1.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.16.1.dist-info/RECORD +166 -0
  164. metaflow-stubs/user_configs/config_decorators.pyi +0 -251
  165. metaflow_stubs-2.15.21.dist-info/RECORD +0 -155
  166. {metaflow_stubs-2.15.21.dist-info → metaflow_stubs-2.16.1.dist-info}/WHEEL +0 -0
  167. {metaflow_stubs-2.15.21.dist-info → metaflow_stubs-2.16.1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.21 #
- # Generated on 2025-07-11T15:58:13.105014 #
+ # MF version: 2.16.1 #
+ # Generated on 2025-07-15T19:29:43.053659 #
  ######################################################################################################
 
  from __future__ import annotations
@@ -13,7 +13,8 @@ if typing.TYPE_CHECKING:
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
- from . import info_file as info_file
+ from . import meta_files as meta_files
+ from . import packaging_sys as packaging_sys
  from . import exception as exception
  from . import metaflow_config as metaflow_config
  from . import multicore_utils as multicore_utils
@@ -23,6 +24,7 @@ from . import metaflow_current as metaflow_current
  from .metaflow_current import current as current
  from . import parameters as parameters
  from . import user_configs as user_configs
+ from . import user_decorators as user_decorators
  from . import tagging_util as tagging_util
  from . import metadata_provider as metadata_provider
  from . import flowspec as flowspec
@@ -33,19 +35,21 @@ from .parameters import JSONType as JSONType
  from .user_configs.config_parameters import Config as Config
  from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
- from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
- from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
+ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDecorator
+ from .user_decorators.user_step_decorator import StepMutator as StepMutator
+ from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
+ from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
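The hunk above shows the headline change of 2.16: `user_configs.config_decorators` (with `CustomFlowDecorator`/`CustomStepDecorator`) is gone, replaced by the new `user_decorators` package. The stubs only confirm the exported names (`UserStepDecorator`, `StepMutator`, `user_step_decorator`, `FlowMutator`); the sketch below assumes the generator-based wrapper protocol described in the Metaflow 2.16 custom-decorator docs and is illustrative, not authoritative:

```python
# Hedged sketch only: `user_step_decorator` is exported by these stubs, but the
# wrapper protocol (decorate a generator; code before `yield` runs before the
# step body, code after `yield` runs after it) is assumed from Metaflow 2.16 docs.
import time

from metaflow import FlowSpec, step, user_step_decorator

@user_step_decorator
def timed(step_name, flow, inputs=None, attributes=None):
    start = time.time()
    yield  # the wrapped step body executes here
    print(f"{step_name} finished in {time.time() - start:.1f}s")

class TimedFlow(FlowSpec):

    @timed        # custom decorator defined above (name invented for this example)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimedFlow()
```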
@@ -73,6 +77,8 @@ from . import cli as cli
 
  EXT_PKG: str
 
+ USER_SKIP_STEP: dict
+
  @typing.overload
  def step(f: typing.Callable[[FlowSpecDerived], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
@@ -146,95 +152,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
-
- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
- """
- ...
-
  @typing.overload
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -295,35 +212,61 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...
 
  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
 
  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
@@ -407,210 +350,54 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...
 
  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies environment variables to be set prior to the execution of a step.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies environment variables to be set prior to the execution of a step.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...
 
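`@environment` (relocated in this hunk, unchanged in behavior) injects environment variables before the step body runs. A minimal sketch, with an invented variable name:

```python
import os

from metaflow import FlowSpec, step, environment

class EnvFlow(FlowSpec):

    @environment(vars={"APP_MODE": "demo"})  # APP_MODE is an invented example
    @step
    def start(self):
        # The variable is visible inside the step, including on remote compute.
        print(os.environ["APP_MODE"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```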
@@ -666,21 +453,53 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
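The `@catch` docstring above describes the error-artifact pattern; here is a minimal sketch (flow and artifact names invented) of a downstream step inspecting the stored exception:

```python
from metaflow import FlowSpec, step, catch

class RobustFlow(FlowSpec):

    @catch(var="compute_failed")
    @step
    def start(self):
        self.result = 1 / 0  # raises; @catch stores the exception in compute_failed
        self.next(self.end)

    @step
    def end(self):
        # On the happy path compute_failed is falsy; otherwise it holds the exception.
        if self.compute_failed:
            print("start failed:", self.compute_failed)

if __name__ == "__main__":
    RobustFlow()
```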
@@ -733,96 +552,6 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...
 
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -878,114 +607,328 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
878
607
  """
879
608
  ...
880
609
 
881
- @typing.overload
882
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
610
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
883
611
  """
884
- Specifies the flow(s) that this flow depends on.
885
-
886
- ```
887
- @trigger_on_finish(flow='FooFlow')
888
- ```
889
- or
890
- ```
891
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
892
- ```
893
- This decorator respects the @project decorator and triggers the flow
894
- when upstream runs within the same namespace complete successfully
895
-
896
- Additionally, you can specify project aware upstream flow dependencies
897
- by specifying the fully qualified project_flow_name.
898
- ```
899
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
900
- ```
901
- or
902
- ```
903
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
904
- ```
905
-
906
- You can also specify just the project or project branch (other values will be
907
- inferred from the current project or project branch):
908
- ```
909
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
910
- ```
911
-
912
- Note that `branch` is typically one of:
913
- - `prod`
914
- - `user.bob`
915
- - `test.my_experiment`
916
- - `prod.staging`
612
+ Specifies that this step should execute on Kubernetes.
917
613
 
918
614
 
919
615
  Parameters
920
616
  ----------
921
- flow : Union[str, Dict[str, str]], optional, default None
922
- Upstream flow dependency for this flow.
923
- flows : List[Union[str, Dict[str, str]]], default []
924
- Upstream flow dependencies for this flow.
925
- options : Dict[str, Any], default {}
926
- Backend-specific configuration for tuning eventing behavior.
927
- """
928
- ...
929
-
617
+ cpu : int, default 1
618
+ Number of CPUs required for this step. If `@resources` is
619
+ also present, the maximum value from all decorators is used.
620
+ memory : int, default 4096
621
+ Memory size (in MB) required for this step. If
622
+ `@resources` is also present, the maximum value from all decorators is
623
+ used.
624
+ disk : int, default 10240
625
+ Disk size (in MB) required for this step. If
626
+ `@resources` is also present, the maximum value from all decorators is
627
+ used.
628
+ image : str, optional, default None
629
+ Docker image to use when launching on Kubernetes. If not specified, and
630
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
631
+ not, a default Docker image mapping to the current version of Python is used.
632
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
633
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
634
+ image_pull_secrets: List[str], default []
635
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
636
+ Kubernetes image pull secrets to use when pulling container images
637
+ in Kubernetes.
638
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
639
+ Kubernetes service account to use when launching pod in Kubernetes.
640
+ secrets : List[str], optional, default None
641
+ Kubernetes secrets to use when launching pod in Kubernetes. These
642
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
643
+ in Metaflow configuration.
644
+ node_selector: Union[Dict[str,str], str], optional, default None
645
+ Kubernetes node selector(s) to apply to the pod running the task.
646
+ Can be passed in as a comma separated string of values e.g.
647
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
648
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
649
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
650
+ Kubernetes namespace to use when launching pod in Kubernetes.
651
+ gpu : int, optional, default None
652
+ Number of GPUs required for this step. A value of zero implies that
653
+ the scheduled node should not have GPUs.
654
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
655
+ The vendor of the GPUs to be used for this step.
656
+ tolerations : List[str], default []
657
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
658
+ Kubernetes tolerations to use when launching pod in Kubernetes.
659
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
660
+ Kubernetes labels to use when launching pod in Kubernetes.
661
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
662
+ Kubernetes annotations to use when launching pod in Kubernetes.
663
+ use_tmpfs : bool, default False
664
+ This enables an explicit tmpfs mount for this step.
665
+ tmpfs_tempdir : bool, default True
666
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
667
+ tmpfs_size : int, optional, default: None
668
+ The value for the size (in MiB) of the tmpfs mount for this step.
669
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
670
+ memory allocated for this step.
671
+ tmpfs_path : str, optional, default /metaflow_temp
672
+ Path to tmpfs mount for this step.
673
+ persistent_volume_claims : Dict[str, str], optional, default None
674
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
675
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
676
+ shared_memory: int, optional
677
+ Shared memory size (in MiB) required for this step
678
+ port: int, optional
679
+ Port number to specify in the Kubernetes job object
680
+ compute_pool : str, optional, default None
681
+ Compute pool to be used for for this step.
682
+ If not specified, any accessible compute pool within the perimeter is used.
683
+ hostname_resolution_timeout: int, default 10 * 60
684
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
685
+ Only applicable when @parallel is used.
686
+ qos: str, default: Burstable
687
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
688
+
689
+ security_context: Dict[str, Any], optional, default None
690
+ Container security context. Applies to the task container. Allows the following keys:
691
+ - privileged: bool, optional, default None
692
+ - allow_privilege_escalation: bool, optional, default None
693
+ - run_as_user: int, optional, default None
694
+ - run_as_group: int, optional, default None
695
+ - run_as_non_root: bool, optional, default None
696
+ """
697
+ ...
698
+
930
699
  @typing.overload
931
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
700
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
701
+ """
702
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
703
+
704
+
705
+ Parameters
706
+ ----------
707
+ cpu : int, default 1
708
+ Number of CPUs required for this step. If `@resources` is
709
+ also present, the maximum value from all decorators is used.
710
+ gpu : int, default 0
711
+ Number of GPUs required for this step. If `@resources` is
712
+ also present, the maximum value from all decorators is used.
713
+ memory : int, default 4096
714
+ Memory size (in MB) required for this step. If
715
+ `@resources` is also present, the maximum value from all decorators is
716
+ used.
717
+ image : str, optional, default None
718
+ Docker image to use when launching on AWS Batch. If not specified, and
719
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
720
+ not, a default Docker image mapping to the current version of Python is used.
721
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
722
+ AWS Batch Job Queue to submit the job to.
723
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
724
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
725
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
726
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
727
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
728
+ shared_memory : int, optional, default None
729
+ The value for the size (in MiB) of the /dev/shm volume for this step.
730
+ This parameter maps to the `--shm-size` option in Docker.
731
+ max_swap : int, optional, default None
732
+ The total amount of swap memory (in MiB) a container can use for this
733
+ step. This parameter is translated to the `--memory-swap` option in
734
+ Docker where the value is the sum of the container memory plus the
735
+ `max_swap` value.
736
+ swappiness : int, optional, default None
737
+ This allows you to tune memory swappiness behavior for this step.
738
+ A swappiness value of 0 causes swapping not to happen unless absolutely
739
+ necessary. A swappiness value of 100 causes pages to be swapped very
740
+ aggressively. Accepted values are whole numbers between 0 and 100.
741
+ use_tmpfs : bool, default False
742
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
743
+ not available on Fargate compute environments.
744
+ tmpfs_tempdir : bool, default True
745
+ If True, sets METAFLOW_TEMPDIR to `tmpfs_path` for this step.
746
+ tmpfs_size : int, optional, default None
747
+ The value for the size (in MiB) of the tmpfs mount for this step.
748
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
749
+ memory allocated for this step.
750
+ tmpfs_path : str, optional, default None
751
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
752
+ inferentia : int, default 0
753
+ Number of Inferentia chips required for this step.
754
+ trainium : int, optional, default None
755
+ Alias for inferentia. Use only one of the two.
756
+ efa : int, default 0
757
+ Number of Elastic Fabric Adapter network devices to attach to the container.
758
+ ephemeral_storage : int, optional, default None
759
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200 GiB).
760
+ This is only relevant for Fargate compute environments.
761
+ log_driver: str, optional, default None
762
+ The log driver to use for the Amazon ECS container.
763
+ log_options: List[str], optional, default None
764
+ List of strings containing options for the chosen log driver. The configurable values
765
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
766
+ Example: [`awslogs-group:aws/batch/job`]
767
+ """
932
768
  ...
933
769
 
934
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
770
+ @typing.overload
771
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
772
+ ...
773
+
774
+ @typing.overload
775
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
776
+ ...
777
+
778
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: typing.Optional[int] = None, efa: int = 0, ephemeral_storage: typing.Optional[int] = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
935
779
  """
936
- Specifies the flow(s) that this flow depends on.
780
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
937
781
 
938
- ```
939
- @trigger_on_finish(flow='FooFlow')
940
- ```
941
- or
942
- ```
943
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
944
- ```
945
- This decorator respects the @project decorator and triggers the flow
946
- when upstream runs within the same namespace complete successfully
947
782
 
948
- Additionally, you can specify project aware upstream flow dependencies
949
- by specifying the fully qualified project_flow_name.
950
- ```
951
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
952
- ```
953
- or
954
- ```
955
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
956
- ```
783
+ Parameters
784
+ ----------
785
+ cpu : int, default 1
786
+ Number of CPUs required for this step. If `@resources` is
787
+ also present, the maximum value from all decorators is used.
788
+ gpu : int, default 0
789
+ Number of GPUs required for this step. If `@resources` is
790
+ also present, the maximum value from all decorators is used.
791
+ memory : int, default 4096
792
+ Memory size (in MB) required for this step. If
793
+ `@resources` is also present, the maximum value from all decorators is
794
+ used.
795
+ image : str, optional, default None
796
+ Docker image to use when launching on AWS Batch. If not specified, and
797
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
798
+ not, a default Docker image mapping to the current version of Python is used.
799
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
800
+ AWS Batch Job Queue to submit the job to.
801
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
802
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
803
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
804
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
805
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
806
+ shared_memory : int, optional, default None
807
+ The value for the size (in MiB) of the /dev/shm volume for this step.
808
+ This parameter maps to the `--shm-size` option in Docker.
809
+ max_swap : int, optional, default None
810
+ The total amount of swap memory (in MiB) a container can use for this
811
+ step. This parameter is translated to the `--memory-swap` option in
812
+ Docker where the value is the sum of the container memory plus the
813
+ `max_swap` value.
814
+ swappiness : int, optional, default None
815
+ This allows you to tune memory swappiness behavior for this step.
816
+ A swappiness value of 0 causes swapping not to happen unless absolutely
817
+ necessary. A swappiness value of 100 causes pages to be swapped very
818
+ aggressively. Accepted values are whole numbers between 0 and 100.
819
+ use_tmpfs : bool, default False
820
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
820
+ not available on Fargate compute environments.
821
+ tmpfs_tempdir : bool, default True
822
+ If True, sets METAFLOW_TEMPDIR to `tmpfs_path` for this step.
824
+ tmpfs_size : int, optional, default None
825
+ The value for the size (in MiB) of the tmpfs mount for this step.
826
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
827
+ memory allocated for this step.
828
+ tmpfs_path : str, optional, default None
829
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
830
+ inferentia : int, default 0
831
+ Number of Inferentia chips required for this step.
832
+ trainium : int, optional, default None
833
+ Alias for inferentia. Use only one of the two.
834
+ efa : int, default 0
835
+ Number of Elastic Fabric Adapter network devices to attach to the container.
836
+ ephemeral_storage : int, optional, default None
837
+ The total amount, in GiB, of ephemeral storage to set for the task (21-200 GiB).
838
+ This is only relevant for Fargate compute environments.
839
+ log_driver: str, optional, default None
840
+ The log driver to use for the Amazon ECS container.
841
+ log_options: List[str], optional, default None
842
+ List of strings containing options for the chosen log driver. The configurable values
843
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
844
+ Example: [`awslogs-group:aws/batch/job`]
845
+ """
846
+ ...
847
+
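For orientation (an editorial sketch, not from the diff), a step pinned to AWS Batch might look as follows; the queue name and resource values are placeholders:
```
from metaflow import FlowSpec, batch, step

class BatchDemoFlow(FlowSpec):

    @batch(
        cpu=2,
        memory=8192,                                  # MB
        queue="my-batch-queue",                       # placeholder job queue
        log_driver="awslogs",
        log_options=["awslogs-group:aws/batch/job"],  # format from the docstring example
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```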
848
+ @typing.overload
849
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
850
+ """
851
+ Specifies secrets to be retrieved and injected as environment variables prior to
852
+ the execution of a step.
957
853
 
958
- You can also specify just the project or project branch (other values will be
959
- inferred from the current project or project branch):
960
- ```
961
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
962
- ```
963
854
 
964
- Note that `branch` is typically one of:
965
- - `prod`
966
- - `user.bob`
967
- - `test.my_experiment`
968
- - `prod.staging`
855
+ Parameters
856
+ ----------
857
+ sources : List[Union[str, Dict[str, Any]]], default: []
858
+ List of secret specs defining how the secrets are to be retrieved.
859
+ role : str, optional, default: None
860
+ Role to use for fetching secrets.
861
+ """
862
+ ...
863
+
864
+ @typing.overload
865
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
866
+ ...
867
+
868
+ @typing.overload
869
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
870
+ ...
871
+
872
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
873
+ """
874
+ Specifies secrets to be retrieved and injected as environment variables prior to
875
+ the execution of a step.
969
876
 
970
877
 
971
878
  Parameters
972
879
  ----------
973
- flow : Union[str, Dict[str, str]], optional, default None
974
- Upstream flow dependency for this flow.
975
- flows : List[Union[str, Dict[str, str]]], default []
976
- Upstream flow dependencies for this flow.
977
- options : Dict[str, Any], default {}
978
- Backend-specific configuration for tuning eventing behavior.
880
+ sources : List[Union[str, Dict[str, Any]]], default: []
881
+ List of secret specs defining how the secrets are to be retrieved.
882
+ role : str, optional, default: None
883
+ Role to use for fetching secrets.
979
884
  """
980
885
  ...
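A hedged sketch (editorial addition) of `@secrets` in use; the secret id and the environment variable it populates are assumptions about how the secret was stored:
```
import os

from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])  # placeholder secret spec
    @step
    def start(self):
        # Keys of the fetched secret are injected as environment variables
        # before this step body executes.
        print("user:", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```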
981
886
 
982
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
887
+ @typing.overload
888
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
983
889
  """
984
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
985
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
986
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
987
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
988
- starts only after all sensors finish.
890
+ Specifies the PyPI packages for all steps of the flow.
891
+
892
+ Use `@pypi_base` to set common packages required by all
893
+ steps and use `@pypi` to specify step-specific overrides.
894
+
895
+ Parameters
896
+ ----------
897
+ packages : Dict[str, str], default: {}
898
+ Packages to use for this flow. The key is the name of the package
899
+ and the value is the version to use.
900
+ python : str, optional, default: None
901
+ Version of Python to use, e.g. '3.7.4'. If None, the version of the
902
+ Python interpreter used to start the run is used.
903
+ """
904
+ ...
905
+
906
+ @typing.overload
907
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
908
+ ...
909
+
910
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
911
+ """
912
+ Specifies the PyPI packages for all steps of the flow.
913
+
914
+ Use `@pypi_base` to set common packages required by all
915
+ steps and use `@pypi` to specify step-specific overrides.
916
+
917
+ Parameters
918
+ ----------
919
+ packages : Dict[str, str], default: {}
920
+ Packages to use for this flow. The key is the name of the package
921
+ and the value is the version to use.
922
+ python : str, optional, default: None
923
+ Version of Python to use, e.g. '3.7.4'. If None, the version of the
924
+ Python interpreter used to start the run is used.
925
+ """
926
+ ...
927
+
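To make `@pypi_base` concrete (editorial sketch; the package and Python versions are hypothetical pins):
```
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved inside the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```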
928
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
929
+ """
930
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
931
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
989
932
 
990
933
 
991
934
  Parameters
@@ -1007,76 +950,117 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1007
950
  Name of the sensor on Airflow
1008
951
  description : str
1009
952
  Description of sensor in the Airflow UI
1010
- bucket_key : Union[str, List[str]]
1011
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1012
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1013
- bucket_name : str
1014
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1015
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1016
- wildcard_match : bool
1017
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1018
- aws_conn_id : str
1019
- a reference to the s3 connection on Airflow. (Default: None)
1020
- verify : bool
1021
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
953
+ external_dag_id : str
954
+ The dag_id that contains the task you want to wait for.
955
+ external_task_ids : List[str]
956
+ The list of task_ids that you want to wait for.
957
+ If None (the default value), the sensor waits for the DAG. (Default: None)
958
+ allowed_states : List[str]
959
+ Iterable of allowed states. (Default: ['success'])
960
+ failed_states : List[str]
961
+ Iterable of failed or disallowed states. (Default: None)
962
+ execution_delta : datetime.timedelta
963
+ Time difference with the previous execution to look at;
964
+ the default is the same logical date as the current task or DAG. (Default: None)
965
+ check_existence: bool
966
+ Set to True to check whether the external task exists or whether
967
+ the DAG to wait for exists. (Default: True)
1022
968
  """
1023
969
  ...
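A sketch of gating a flow on an upstream Airflow task (editorial; the DAG and task ids are placeholders, and the remaining sensor options are left at their defaults):
```
from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    external_dag_id="upstream_etl",        # placeholder DAG id
    external_task_ids=["publish_table"],   # placeholder task id
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```
Compiled with `airflow create`, the generated DAG waits on the sensor before `start` runs.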
1024
970
 
1025
- @typing.overload
1026
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
971
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1027
972
  """
1028
- Specifies the Conda environment for all steps of the flow.
973
+ Specifies what flows belong to the same project.
1029
974
 
1030
- Use `@conda_base` to set common libraries required by all
1031
- steps and use `@conda` to specify step-specific additions.
975
+ A project-specific namespace is created for all flows that
976
+ use the same `@project(name)`.
1032
977
 
1033
978
 
1034
979
  Parameters
1035
980
  ----------
1036
- packages : Dict[str, str], default {}
1037
- Packages to use for this flow. The key is the name of the package
1038
- and the value is the version to use.
1039
- libraries : Dict[str, str], default {}
1040
- Supported for backward compatibility. When used with packages, packages will take precedence.
1041
- python : str, optional, default None
1042
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1043
- that the version used will correspond to the version of the Python interpreter used to start the run.
1044
- disabled : bool, default False
1045
- If set to True, disables Conda.
981
+ name : str
982
+ Project name. Make sure that the name is unique amongst all
983
+ projects that use the same production scheduler. The name may
984
+ contain only lowercase alphanumeric characters and underscores.
985
+
986
+ branch : Optional[str], default None
987
+ The branch to use. If not specified, the branch is set to
988
+ `user.<username>` unless `production` is set to `True`. This can
989
+ also be set on the command line using `--branch` as a top-level option.
990
+ It is an error to specify `branch` in the decorator and on the command line.
991
+
992
+ production : bool, default False
993
+ Whether or not the branch is the production branch. This can also be set on the
994
+ command line using `--production` as a top-level option. It is an error to specify
995
+ `production` in the decorator and on the command line.
996
+ The project branch name will be:
997
+ - if `branch` is specified:
998
+ - if `production` is True: `prod.<branch>`
999
+ - if `production` is False: `test.<branch>`
1000
+ - if `branch` is not specified:
1001
+ - if `production` is True: `prod`
1002
+ - if `production` is False: `user.<username>`
1046
1003
  """
1047
1004
  ...
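A short sketch (not in the diff) of how `@project` namespaces a flow; the project and flow names are illustrative:
```
from metaflow import FlowSpec, project, step

@project(name="fraud_detection")   # hypothetical project name
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```
Run with `python scoring.py --branch my_branch run` to work under `test.my_branch`, or with `--production` to target the `prod` branch, per the naming rules above.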
1048
1005
 
1049
1006
  @typing.overload
1050
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1007
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1008
+ """
1009
+ Specifies the times when the flow should be run when running on a
1010
+ production scheduler.
1011
+
1012
+
1013
+ Parameters
1014
+ ----------
1015
+ hourly : bool, default False
1016
+ Run the workflow hourly.
1017
+ daily : bool, default True
1018
+ Run the workflow daily.
1019
+ weekly : bool, default False
1020
+ Run the workflow weekly.
1021
+ cron : str, optional, default None
1022
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1023
+ specified by this expression.
1024
+ timezone : str, optional, default None
1025
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo Workflows,
1026
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1027
+ """
1051
1028
  ...
1052
1029
 
1053
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1030
+ @typing.overload
1031
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1032
+ ...
1033
+
1034
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1054
1035
  """
1055
- Specifies the Conda environment for all steps of the flow.
1056
-
1057
- Use `@conda_base` to set common libraries required by all
1058
- steps and use `@conda` to specify step-specific additions.
1036
+ Specifies the times when the flow should be run when running on a
1037
+ production scheduler.
1059
1038
 
1060
1039
 
1061
1040
  Parameters
1062
1041
  ----------
1063
- packages : Dict[str, str], default {}
1064
- Packages to use for this flow. The key is the name of the package
1065
- and the value is the version to use.
1066
- libraries : Dict[str, str], default {}
1067
- Supported for backward compatibility. When used with packages, packages will take precedence.
1068
- python : str, optional, default None
1069
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1070
- that the version used will correspond to the version of the Python interpreter used to start the run.
1071
- disabled : bool, default False
1072
- If set to True, disables Conda.
1042
+ hourly : bool, default False
1043
+ Run the workflow hourly.
1044
+ daily : bool, default True
1045
+ Run the workflow daily.
1046
+ weekly : bool, default False
1047
+ Run the workflow weekly.
1048
+ cron : str, optional, default None
1049
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1050
+ specified by this expression.
1051
+ timezone : str, optional, default None
1052
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1053
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1073
1054
  """
1074
1055
  ...
1075
1056
 
1076
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1057
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1077
1058
  """
1078
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1079
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1059
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1060
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1061
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1062
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1063
+ starts only after all sensors finish.
1080
1064
 
1081
1065
 
1082
1066
  Parameters
@@ -1098,62 +1082,18 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1098
1082
  Name of the sensor on Airflow
1099
1083
  description : str
1100
1084
  Description of sensor in the Airflow UI
1101
- external_dag_id : str
1102
- The dag_id that contains the task you want to wait for.
1103
- external_task_ids : List[str]
1104
- The list of task_ids that you want to wait for.
1105
- If None (default value) the sensor waits for the DAG. (Default: None)
1106
- allowed_states : List[str]
1107
- Iterable of allowed states, (Default: ['success'])
1108
- failed_states : List[str]
1109
- Iterable of failed or dis-allowed states. (Default: None)
1110
- execution_delta : datetime.timedelta
1111
- time difference with the previous execution to look at,
1112
- the default is the same logical date as the current task or DAG. (Default: None)
1113
- check_existence: bool
1114
- Set to True to check if the external task exists or check if
1115
- the DAG to wait for exists. (Default: True)
1116
- """
1117
- ...
1118
-
1119
- @typing.overload
1120
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1121
- """
1122
- Specifies the PyPI packages for all steps of the flow.
1123
-
1124
- Use `@pypi_base` to set common packages required by all
1125
- steps and use `@pypi` to specify step-specific overrides.
1126
-
1127
- Parameters
1128
- ----------
1129
- packages : Dict[str, str], default: {}
1130
- Packages to use for this flow. The key is the name of the package
1131
- and the value is the version to use.
1132
- python : str, optional, default: None
1133
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1134
- that the version used will correspond to the version of the Python interpreter used to start the run.
1135
- """
1136
- ...
1137
-
1138
- @typing.overload
1139
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1140
- ...
1141
-
1142
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1143
- """
1144
- Specifies the PyPI packages for all steps of the flow.
1145
-
1146
- Use `@pypi_base` to set common packages required by all
1147
- steps and use `@pypi` to specify step-specific overrides.
1148
-
1149
- Parameters
1150
- ----------
1151
- packages : Dict[str, str], default: {}
1152
- Packages to use for this flow. The key is the name of the package
1153
- and the value is the version to use.
1154
- python : str, optional, default: None
1155
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1156
- that the version used will correspond to the version of the Python interpreter used to start the run.
1085
+ bucket_key : Union[str, List[str]]
1086
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1087
+ When specified as a full s3:// URL, leave `bucket_name` as None.
1088
+ bucket_name : str
1089
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1090
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1091
+ wildcard_match : bool
1092
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1093
+ aws_conn_id : str
1094
+ A reference to the S3 connection on Airflow. (Default: None)
1095
+ verify : bool
1096
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1157
1097
  """
1158
1098
  ...
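A hedged sketch (editorial) of waiting on an S3 key before a flow starts; the bucket and key are placeholders:
```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/daily/2024-01-01/_SUCCESS",  # placeholder key
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```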
1159
1099
 
@@ -1251,88 +1191,154 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1251
1191
  ...
1252
1192
 
1253
1193
  @typing.overload
1254
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1194
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1255
1195
  """
1256
- Specifies the times when the flow should be run when running on a
1257
- production scheduler.
1196
+ Specifies the flow(s) that this flow depends on.
1197
+
1198
+ ```
1199
+ @trigger_on_finish(flow='FooFlow')
1200
+ ```
1201
+ or
1202
+ ```
1203
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1204
+ ```
1205
+ This decorator respects the @project decorator and triggers the flow
1206
+ when upstream runs within the same namespace complete successfully.
1207
+
1208
+ Additionally, you can specify project-aware upstream flow dependencies
1209
+ by specifying the fully qualified project_flow_name.
1210
+ ```
1211
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1212
+ ```
1213
+ or
1214
+ ```
1215
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1216
+ ```
1217
+
1218
+ You can also specify just the project or project branch (other values will be
1219
+ inferred from the current project or project branch):
1220
+ ```
1221
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1222
+ ```
1223
+
1224
+ Note that `branch` is typically one of:
1225
+ - `prod`
1226
+ - `user.bob`
1227
+ - `test.my_experiment`
1228
+ - `prod.staging`
1258
1229
 
1259
1230
 
1260
1231
  Parameters
1261
1232
  ----------
1262
- hourly : bool, default False
1263
- Run the workflow hourly.
1264
- daily : bool, default True
1265
- Run the workflow daily.
1266
- weekly : bool, default False
1267
- Run the workflow weekly.
1268
- cron : str, optional, default None
1269
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1270
- specified by this expression.
1271
- timezone : str, optional, default None
1272
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1273
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1233
+ flow : Union[str, Dict[str, str]], optional, default None
1234
+ Upstream flow dependency for this flow.
1235
+ flows : List[Union[str, Dict[str, str]]], default []
1236
+ Upstream flow dependencies for this flow.
1237
+ options : Dict[str, Any], default {}
1238
+ Backend-specific configuration for tuning eventing behavior.
1274
1239
  """
1275
1240
  ...
1276
1241
 
1277
1242
  @typing.overload
1278
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1243
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1279
1244
  ...
1280
1245
 
1281
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1246
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1282
1247
  """
1283
- Specifies the times when the flow should be run when running on a
1284
- production scheduler.
1248
+ Specifies the flow(s) that this flow depends on.
1249
+
1250
+ ```
1251
+ @trigger_on_finish(flow='FooFlow')
1252
+ ```
1253
+ or
1254
+ ```
1255
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1256
+ ```
1257
+ This decorator respects the @project decorator and triggers the flow
1258
+ when upstream runs within the same namespace complete successfully.
1259
+
1260
+ Additionally, you can specify project-aware upstream flow dependencies
1261
+ by specifying the fully qualified project_flow_name.
1262
+ ```
1263
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1264
+ ```
1265
+ or
1266
+ ```
1267
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1268
+ ```
1269
+
1270
+ You can also specify just the project or project branch (other values will be
1271
+ inferred from the current project or project branch):
1272
+ ```
1273
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1274
+ ```
1275
+
1276
+ Note that `branch` is typically one of:
1277
+ - `prod`
1278
+ - `user.bob`
1279
+ - `test.my_experiment`
1280
+ - `prod.staging`
1285
1281
 
1286
1282
 
1287
1283
  Parameters
1288
1284
  ----------
1289
- hourly : bool, default False
1290
- Run the workflow hourly.
1291
- daily : bool, default True
1292
- Run the workflow daily.
1293
- weekly : bool, default False
1294
- Run the workflow weekly.
1295
- cron : str, optional, default None
1296
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1297
- specified by this expression.
1298
- timezone : str, optional, default None
1299
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1300
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1285
+ flow : Union[str, Dict[str, str]], optional, default None
1286
+ Upstream flow dependency for this flow.
1287
+ flows : List[Union[str, Dict[str, str]]], default []
1288
+ Upstream flow dependencies for this flow.
1289
+ options : Dict[str, Any], default {}
1290
+ Backend-specific configuration for tuning eventing behavior.
1301
1291
  """
1302
1292
  ...
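To illustrate (an editorial addition), chaining a flow off an upstream run uses the docstring's own example flow names:
```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")   # start whenever FooFlow completes successfully
class BarFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BarFlow()
```
When deployed (e.g. to Argo Workflows), BarFlow is triggered automatically each time a FooFlow run in the same project namespace finishes.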
1303
1293
 
1304
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1294
+ @typing.overload
1295
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1305
1296
  """
1306
- Specifies what flows belong to the same project.
1297
+ Specifies the Conda environment for all steps of the flow.
1307
1298
 
1308
- A project-specific namespace is created for all flows that
1309
- use the same `@project(name)`.
1299
+ Use `@conda_base` to set common libraries required by all
1300
+ steps and use `@conda` to specify step-specific additions.
1310
1301
 
1311
1302
 
1312
1303
  Parameters
1313
1304
  ----------
1314
- name : str
1315
- Project name. Make sure that the name is unique amongst all
1316
- projects that use the same production scheduler. The name may
1317
- contain only lowercase alphanumeric characters and underscores.
1305
+ packages : Dict[str, str], default {}
1306
+ Packages to use for this flow. The key is the name of the package
1307
+ and the value is the version to use.
1308
+ libraries : Dict[str, str], default {}
1309
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1310
+ python : str, optional, default None
1311
+ Version of Python to use, e.g. '3.7.4'. If None, the version of the
1312
+ Python interpreter used to start the run is used.
1313
+ disabled : bool, default False
1314
+ If set to True, disables Conda.
1315
+ """
1316
+ ...
1317
+
1318
+ @typing.overload
1319
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1320
+ ...
1321
+
1322
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1323
+ """
1324
+ Specifies the Conda environment for all steps of the flow.
1318
1325
 
1319
- branch : Optional[str], default None
1320
- The branch to use. If not specified, the branch is set to
1321
- `user.<username>` unless `production` is set to `True`. This can
1322
- also be set on the command line using `--branch` as a top-level option.
1323
- It is an error to specify `branch` in the decorator and on the command line.
1326
+ Use `@conda_base` to set common libraries required by all
1327
+ steps and use `@conda` to specify step-specific additions.
1324
1328
 
1325
- production : bool, default False
1326
- Whether or not the branch is the production branch. This can also be set on the
1327
- command line using `--production` as a top-level option. It is an error to specify
1328
- `production` in the decorator and on the command line.
1329
- The project branch name will be:
1330
- - if `branch` is specified:
1331
- - if `production` is True: `prod.<branch>`
1332
- - if `production` is False: `test.<branch>`
1333
- - if `branch` is not specified:
1334
- - if `production` is True: `prod`
1335
- - if `production` is False: `user.<username>`
1329
+
1330
+ Parameters
1331
+ ----------
1332
+ packages : Dict[str, str], default {}
1333
+ Packages to use for this flow. The key is the name of the package
1334
+ and the value is the version to use.
1335
+ libraries : Dict[str, str], default {}
1336
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1337
+ python : str, optional, default None
1338
+ Version of Python to use, e.g. '3.7.4'. If None, the version of the
1339
+ Python interpreter used to start the run is used.
1340
+ disabled : bool, default False
1341
+ If set to True, disables Conda.
1336
1342
  """
1337
1343
  ...
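Finally, a sketch (not from the diff) of flow-wide Conda pinning; the package and Python versions are hypothetical:
```
from metaflow import FlowSpec, conda_base, step

@conda_base(packages={"numpy": "1.26.4"}, python="3.11.5")
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved inside the @conda_base environment
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```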
1338
1344