metaflow-stubs 2.18.4__py2.py3-none-any.whl → 2.18.6__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic; see the registry's advisory for more details.

Files changed (166):
  1. metaflow-stubs/__init__.pyi +642 -642
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +3 -3
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  29. metaflow-stubs/parameters.pyi +2 -2
  30. metaflow-stubs/plugins/__init__.pyi +11 -11
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +45 -4
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +4 -4
  141. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +3 -3
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +2 -2
  154. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.6.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.6.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.18.4.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.6.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.6.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.4 #
4
- # Generated on 2025-09-12T00:00:14.759393 #
3
+ # MF version: 2.18.6 #
4
+ # Generated on 2025-09-17T19:37:30.074072 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import metaflow_git as metaflow_git
43
- from . import tuple_util as tuple_util
44
42
  from . import events as events
43
+ from . import tuple_util as tuple_util
44
+ from . import metaflow_git as metaflow_git
45
45
  from . import runner as runner
46
46
  from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
50
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
51
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
51
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
53
  from . import cards as cards
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
@@ -152,57 +152,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
152
152
  """
153
153
  ...
154
154
 
155
- @typing.overload
156
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
157
- """
158
- Specifies the PyPI packages for the step.
159
-
160
- Information in this decorator will augment any
161
- attributes set in the `@pyi_base` flow-level decorator. Hence,
162
- you can use `@pypi_base` to set packages required by all
163
- steps and use `@pypi` to specify step-specific overrides.
164
-
165
-
166
- Parameters
167
- ----------
168
- packages : Dict[str, str], default: {}
169
- Packages to use for this step. The key is the name of the package
170
- and the value is the version to use.
171
- python : str, optional, default: None
172
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
173
- that the version used will correspond to the version of the Python interpreter used to start the run.
174
- """
175
- ...
176
-
177
- @typing.overload
178
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
179
- ...
180
-
181
- @typing.overload
182
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
183
- ...
184
-
185
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
186
- """
187
- Specifies the PyPI packages for the step.
188
-
189
- Information in this decorator will augment any
190
- attributes set in the `@pyi_base` flow-level decorator. Hence,
191
- you can use `@pypi_base` to set packages required by all
192
- steps and use `@pypi` to specify step-specific overrides.
193
-
194
-
195
- Parameters
196
- ----------
197
- packages : Dict[str, str], default: {}
198
- Packages to use for this step. The key is the name of the package
199
- and the value is the version to use.
200
- python : str, optional, default: None
201
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
202
- that the version used will correspond to the version of the Python interpreter used to start the run.
203
- """
204
- ...
205
-
206
155
  @typing.overload
207
156
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
208
157
  """
@@ -259,27 +208,9 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
259
208
  ...
260
209
 
261
210
  @typing.overload
262
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
263
- """
264
- Decorator prototype for all step decorators. This function gets specialized
265
- and imported for all decorators types by _import_plugin_decorators().
266
- """
267
- ...
268
-
269
- @typing.overload
270
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
271
- ...
272
-
273
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
274
- """
275
- Decorator prototype for all step decorators. This function gets specialized
276
- and imported for all decorators types by _import_plugin_decorators().
277
- """
278
- ...
279
-
280
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
211
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
281
212
  """
282
- Specifies that this step should execute on Kubernetes.
213
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
283
214
 
284
215
 
285
216
  Parameters
@@ -287,190 +218,317 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
287
218
  cpu : int, default 1
288
219
  Number of CPUs required for this step. If `@resources` is
289
220
  also present, the maximum value from all decorators is used.
221
+ gpu : int, default 0
222
+ Number of GPUs required for this step. If `@resources` is
223
+ also present, the maximum value from all decorators is used.
290
224
  memory : int, default 4096
291
225
  Memory size (in MB) required for this step. If
292
226
  `@resources` is also present, the maximum value from all decorators is
293
227
  used.
294
- disk : int, default 10240
295
- Disk size (in MB) required for this step. If
296
- `@resources` is also present, the maximum value from all decorators is
297
- used.
298
228
  image : str, optional, default None
299
- Docker image to use when launching on Kubernetes. If not specified, and
300
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
229
+ Docker image to use when launching on AWS Batch. If not specified, and
230
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
301
231
  not, a default Docker image mapping to the current version of Python is used.
302
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
303
- If given, the imagePullPolicy to be applied to the Docker image of the step.
304
- image_pull_secrets: List[str], default []
305
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
306
- Kubernetes image pull secrets to use when pulling container images
307
- in Kubernetes.
308
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
309
- Kubernetes service account to use when launching pod in Kubernetes.
310
- secrets : List[str], optional, default None
311
- Kubernetes secrets to use when launching pod in Kubernetes. These
312
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
313
- in Metaflow configuration.
314
- node_selector: Union[Dict[str,str], str], optional, default None
315
- Kubernetes node selector(s) to apply to the pod running the task.
316
- Can be passed in as a comma separated string of values e.g.
317
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
318
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
319
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
320
- Kubernetes namespace to use when launching pod in Kubernetes.
321
- gpu : int, optional, default None
322
- Number of GPUs required for this step. A value of zero implies that
323
- the scheduled node should not have GPUs.
324
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
325
- The vendor of the GPUs to be used for this step.
326
- tolerations : List[Dict[str,str]], default []
327
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
328
- Kubernetes tolerations to use when launching pod in Kubernetes.
329
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
330
- Kubernetes labels to use when launching pod in Kubernetes.
331
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
332
- Kubernetes annotations to use when launching pod in Kubernetes.
232
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
233
+ AWS Batch Job Queue to submit the job to.
234
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
235
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
236
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
237
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
238
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
239
+ shared_memory : int, optional, default None
240
+ The value for the size (in MiB) of the /dev/shm volume for this step.
241
+ This parameter maps to the `--shm-size` option in Docker.
242
+ max_swap : int, optional, default None
243
+ The total amount of swap memory (in MiB) a container can use for this
244
+ step. This parameter is translated to the `--memory-swap` option in
245
+ Docker where the value is the sum of the container memory plus the
246
+ `max_swap` value.
247
+ swappiness : int, optional, default None
248
+ This allows you to tune memory swappiness behavior for this step.
249
+ A swappiness value of 0 causes swapping not to happen unless absolutely
250
+ necessary. A swappiness value of 100 causes pages to be swapped very
251
+ aggressively. Accepted values are whole numbers between 0 and 100.
252
+ aws_batch_tags: Dict[str, str], optional, default None
253
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
254
+ Set as string key-value pairs.
333
255
  use_tmpfs : bool, default False
334
- This enables an explicit tmpfs mount for this step.
256
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
257
+ not available on Fargate compute environments
335
258
  tmpfs_tempdir : bool, default True
336
259
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
337
- tmpfs_size : int, optional, default: None
260
+ tmpfs_size : int, optional, default None
338
261
  The value for the size (in MiB) of the tmpfs mount for this step.
339
262
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
340
263
  memory allocated for this step.
341
- tmpfs_path : str, optional, default /metaflow_temp
342
- Path to tmpfs mount for this step.
343
- persistent_volume_claims : Dict[str, str], optional, default None
344
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
345
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
346
- shared_memory: int, optional
347
- Shared memory size (in MiB) required for this step
348
- port: int, optional
349
- Port number to specify in the Kubernetes job object
350
- compute_pool : str, optional, default None
351
- Compute pool to be used for for this step.
352
- If not specified, any accessible compute pool within the perimeter is used.
353
- hostname_resolution_timeout: int, default 10 * 60
354
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
355
- Only applicable when @parallel is used.
356
- qos: str, default: Burstable
357
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
358
-
359
- security_context: Dict[str, Any], optional, default None
360
- Container security context. Applies to the task container. Allows the following keys:
361
- - privileged: bool, optional, default None
362
- - allow_privilege_escalation: bool, optional, default None
363
- - run_as_user: int, optional, default None
364
- - run_as_group: int, optional, default None
365
- - run_as_non_root: bool, optional, default None
366
- """
367
- ...
368
-
369
- @typing.overload
370
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
371
- """
372
- Creates a human-readable report, a Metaflow Card, after this step completes.
373
-
374
- Note that you may add multiple `@card` decorators in a step with different parameters.
375
-
376
-
377
- Parameters
378
- ----------
379
- type : str, default 'default'
380
- Card type.
381
- id : str, optional, default None
382
- If multiple cards are present, use this id to identify this card.
383
- options : Dict[str, Any], default {}
384
- Options passed to the card. The contents depend on the card type.
385
- timeout : int, default 45
386
- Interrupt reporting if it takes more than this many seconds.
264
+ tmpfs_path : str, optional, default None
265
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
266
+ inferentia : int, default 0
267
+ Number of Inferentia chips required for this step.
268
+ trainium : int, default None
269
+ Alias for inferentia. Use only one of the two.
270
+ efa : int, default 0
271
+ Number of elastic fabric adapter network devices to attach to container
272
+ ephemeral_storage : int, default None
273
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
274
+ This is only relevant for Fargate compute environments
275
+ log_driver: str, optional, default None
276
+ The log driver to use for the Amazon ECS container.
277
+ log_options: List[str], optional, default None
278
+ List of strings containing options for the chosen log driver. The configurable values
279
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
280
+ Example: [`awslogs-group:aws/batch/job`]
387
281
  """
388
282
  ...
389
283
 
390
284
  @typing.overload
391
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
285
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
392
286
  ...
393
287
 
394
288
  @typing.overload
395
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
396
- ...
397
-
398
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
399
- """
400
- Creates a human-readable report, a Metaflow Card, after this step completes.
401
-
402
- Note that you may add multiple `@card` decorators in a step with different parameters.
403
-
404
-
405
- Parameters
406
- ----------
407
- type : str, default 'default'
408
- Card type.
409
- id : str, optional, default None
410
- If multiple cards are present, use this id to identify this card.
411
- options : Dict[str, Any], default {}
412
- Options passed to the card. The contents depend on the card type.
413
- timeout : int, default 45
414
- Interrupt reporting if it takes more than this many seconds.
415
- """
289
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
416
290
  ...
417
291
 
418
- @typing.overload
419
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
292
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
420
293
  """
421
- Specifies the Conda environment for the step.
422
-
423
- Information in this decorator will augment any
424
- attributes set in the `@conda_base` flow-level decorator. Hence,
425
- you can use `@conda_base` to set packages required by all
426
- steps and use `@conda` to specify step-specific overrides.
294
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
427
295
 
428
296
 
429
297
  Parameters
430
298
  ----------
431
- packages : Dict[str, str], default {}
432
- Packages to use for this step. The key is the name of the package
433
- and the value is the version to use.
434
- libraries : Dict[str, str], default {}
435
- Supported for backward compatibility. When used with packages, packages will take precedence.
436
- python : str, optional, default None
437
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
438
- that the version used will correspond to the version of the Python interpreter used to start the run.
439
- disabled : bool, default False
440
- If set to True, disables @conda.
299
+ cpu : int, default 1
300
+ Number of CPUs required for this step. If `@resources` is
301
+ also present, the maximum value from all decorators is used.
302
+ gpu : int, default 0
303
+ Number of GPUs required for this step. If `@resources` is
304
+ also present, the maximum value from all decorators is used.
305
+ memory : int, default 4096
306
+ Memory size (in MB) required for this step. If
307
+ `@resources` is also present, the maximum value from all decorators is
308
+ used.
309
+ image : str, optional, default None
310
+ Docker image to use when launching on AWS Batch. If not specified, and
311
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
312
+ not, a default Docker image mapping to the current version of Python is used.
313
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
314
+ AWS Batch Job Queue to submit the job to.
315
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
316
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
317
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
318
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
319
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
320
+ shared_memory : int, optional, default None
321
+ The value for the size (in MiB) of the /dev/shm volume for this step.
322
+ This parameter maps to the `--shm-size` option in Docker.
323
+ max_swap : int, optional, default None
324
+ The total amount of swap memory (in MiB) a container can use for this
325
+ step. This parameter is translated to the `--memory-swap` option in
326
+ Docker where the value is the sum of the container memory plus the
327
+ `max_swap` value.
328
+ swappiness : int, optional, default None
329
+ This allows you to tune memory swappiness behavior for this step.
330
+ A swappiness value of 0 causes swapping not to happen unless absolutely
331
+ necessary. A swappiness value of 100 causes pages to be swapped very
332
+ aggressively. Accepted values are whole numbers between 0 and 100.
333
+ aws_batch_tags: Dict[str, str], optional, default None
334
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
335
+ Set as string key-value pairs.
336
+ use_tmpfs : bool, default False
337
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
338
+ not available on Fargate compute environments
339
+ tmpfs_tempdir : bool, default True
340
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
341
+ tmpfs_size : int, optional, default None
342
+ The value for the size (in MiB) of the tmpfs mount for this step.
343
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
344
+ memory allocated for this step.
345
+ tmpfs_path : str, optional, default None
346
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
347
+ inferentia : int, default 0
348
+ Number of Inferentia chips required for this step.
349
+ trainium : int, default None
350
+ Alias for inferentia. Use only one of the two.
351
+ efa : int, default 0
352
+ Number of elastic fabric adapter network devices to attach to container
353
+ ephemeral_storage : int, default None
354
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
355
+ This is only relevant for Fargate compute environments
356
+ log_driver: str, optional, default None
357
+ The log driver to use for the Amazon ECS container.
358
+ log_options: List[str], optional, default None
359
+ List of strings containing options for the chosen log driver. The configurable values
360
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
361
+ Example: [`awslogs-group:aws/batch/job`]
441
362
  """
442
363
  ...
443
364
 
444
365
  @typing.overload
445
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
366
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
367
+ """
368
+ Decorator prototype for all step decorators. This function gets specialized
369
+ and imported for all decorators types by _import_plugin_decorators().
370
+ """
446
371
  ...
447
372
 
448
373
  @typing.overload
449
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
374
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
450
375
  ...
451
376
 
452
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
377
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
453
378
  """
454
- Specifies the Conda environment for the step.
379
+ Decorator prototype for all step decorators. This function gets specialized
380
+ and imported for all decorators types by _import_plugin_decorators().
381
+ """
382
+ ...
383
+
384
+ @typing.overload
385
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
386
+ """
387
+ Specifies the PyPI packages for the step.
455
388
 
456
389
  Information in this decorator will augment any
457
- attributes set in the `@conda_base` flow-level decorator. Hence,
458
- you can use `@conda_base` to set packages required by all
459
- steps and use `@conda` to specify step-specific overrides.
390
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
391
+ you can use `@pypi_base` to set packages required by all
392
+ steps and use `@pypi` to specify step-specific overrides.
460
393
 
461
394
 
462
395
  Parameters
463
396
  ----------
464
- packages : Dict[str, str], default {}
397
+ packages : Dict[str, str], default: {}
465
398
  Packages to use for this step. The key is the name of the package
466
399
  and the value is the version to use.
467
- libraries : Dict[str, str], default {}
468
- Supported for backward compatibility. When used with packages, packages will take precedence.
469
- python : str, optional, default None
400
+ python : str, optional, default: None
470
401
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
471
402
  that the version used will correspond to the version of the Python interpreter used to start the run.
472
- disabled : bool, default False
473
- If set to True, disables @conda.
403
+ """
404
+ ...
405
+
406
+ @typing.overload
407
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
408
+ ...
409
+
410
+ @typing.overload
411
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
412
+ ...
413
+
414
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
415
+ """
416
+ Specifies the PyPI packages for the step.
417
+
418
+ Information in this decorator will augment any
419
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
420
+ you can use `@pypi_base` to set packages required by all
421
+ steps and use `@pypi` to specify step-specific overrides.
422
+
423
+
424
+ Parameters
425
+ ----------
426
+ packages : Dict[str, str], default: {}
427
+ Packages to use for this step. The key is the name of the package
428
+ and the value is the version to use.
429
+ python : str, optional, default: None
430
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
431
+ that the version used will correspond to the version of the Python interpreter used to start the run.
432
+ """
433
+ ...
434
+
435
+ @typing.overload
436
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
437
+ """
438
+ Specifies that the step will success under all circumstances.
439
+
440
+ The decorator will create an optional artifact, specified by `var`, which
441
+ contains the exception raised. You can use it to detect the presence
442
+ of errors, indicating that all happy-path artifacts produced by the step
443
+ are missing.
444
+
445
+
446
+ Parameters
447
+ ----------
448
+ var : str, optional, default None
449
+ Name of the artifact in which to store the caught exception.
450
+ If not specified, the exception is not stored.
451
+ print_exception : bool, default True
452
+ Determines whether or not the exception is printed to
453
+ stdout when caught.
454
+ """
455
+ ...
456
+
457
+ @typing.overload
458
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
459
+ ...
460
+
461
+ @typing.overload
462
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
463
+ ...
464
+
465
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
466
+ """
467
+ Specifies that the step will success under all circumstances.
468
+
469
+ The decorator will create an optional artifact, specified by `var`, which
470
+ contains the exception raised. You can use it to detect the presence
471
+ of errors, indicating that all happy-path artifacts produced by the step
472
+ are missing.
473
+
474
+
475
+ Parameters
476
+ ----------
477
+ var : str, optional, default None
478
+ Name of the artifact in which to store the caught exception.
479
+ If not specified, the exception is not stored.
480
+ print_exception : bool, default True
481
+ Determines whether or not the exception is printed to
482
+ stdout when caught.
483
+ """
484
+ ...
485
+
486
+ @typing.overload
487
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
488
+ """
489
+ Creates a human-readable report, a Metaflow Card, after this step completes.
490
+
491
+ Note that you may add multiple `@card` decorators in a step with different parameters.
492
+
493
+
494
+ Parameters
495
+ ----------
496
+ type : str, default 'default'
497
+ Card type.
498
+ id : str, optional, default None
499
+ If multiple cards are present, use this id to identify this card.
500
+ options : Dict[str, Any], default {}
501
+ Options passed to the card. The contents depend on the card type.
502
+ timeout : int, default 45
503
+ Interrupt reporting if it takes more than this many seconds.
504
+ """
505
+ ...
506
+
507
+ @typing.overload
508
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
509
+ ...
510
+
511
+ @typing.overload
512
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
513
+ ...
514
+
515
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
516
+ """
517
+ Creates a human-readable report, a Metaflow Card, after this step completes.
518
+
519
+ Note that you may add multiple `@card` decorators in a step with different parameters.
520
+
521
+
522
+ Parameters
523
+ ----------
524
+ type : str, default 'default'
525
+ Card type.
526
+ id : str, optional, default None
527
+ If multiple cards are present, use this id to identify this card.
528
+ options : Dict[str, Any], default {}
529
+ Options passed to the card. The contents depend on the card type.
530
+ timeout : int, default 45
531
+ Interrupt reporting if it takes more than this many seconds.
474
532
  """
475
533
  ...
476
534
 
@@ -533,161 +591,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
533
591
  """
534
592
  ...
535
593
 
536
- @typing.overload
537
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
538
- """
539
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
540
-
541
-
542
- Parameters
543
- ----------
544
- cpu : int, default 1
545
- Number of CPUs required for this step. If `@resources` is
546
- also present, the maximum value from all decorators is used.
547
- gpu : int, default 0
548
- Number of GPUs required for this step. If `@resources` is
549
- also present, the maximum value from all decorators is used.
550
- memory : int, default 4096
551
- Memory size (in MB) required for this step. If
552
- `@resources` is also present, the maximum value from all decorators is
553
- used.
554
- image : str, optional, default None
555
- Docker image to use when launching on AWS Batch. If not specified, and
556
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
557
- not, a default Docker image mapping to the current version of Python is used.
558
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
559
- AWS Batch Job Queue to submit the job to.
560
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
561
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
562
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
563
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
564
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
565
- shared_memory : int, optional, default None
566
- The value for the size (in MiB) of the /dev/shm volume for this step.
567
- This parameter maps to the `--shm-size` option in Docker.
568
- max_swap : int, optional, default None
569
- The total amount of swap memory (in MiB) a container can use for this
570
- step. This parameter is translated to the `--memory-swap` option in
571
- Docker where the value is the sum of the container memory plus the
572
- `max_swap` value.
573
- swappiness : int, optional, default None
574
- This allows you to tune memory swappiness behavior for this step.
575
- A swappiness value of 0 causes swapping not to happen unless absolutely
576
- necessary. A swappiness value of 100 causes pages to be swapped very
577
- aggressively. Accepted values are whole numbers between 0 and 100.
578
- aws_batch_tags: Dict[str, str], optional, default None
579
- Sets arbitrary AWS tags on the AWS Batch compute environment.
580
- Set as string key-value pairs.
581
- use_tmpfs : bool, default False
582
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
583
- not available on Fargate compute environments
584
- tmpfs_tempdir : bool, default True
585
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
586
- tmpfs_size : int, optional, default None
587
- The value for the size (in MiB) of the tmpfs mount for this step.
588
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
589
- memory allocated for this step.
590
- tmpfs_path : str, optional, default None
591
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
592
- inferentia : int, default 0
593
- Number of Inferentia chips required for this step.
594
- trainium : int, default None
595
- Alias for inferentia. Use only one of the two.
596
- efa : int, default 0
597
- Number of elastic fabric adapter network devices to attach to container
598
- ephemeral_storage : int, default None
599
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
600
- This is only relevant for Fargate compute environments
601
- log_driver: str, optional, default None
602
- The log driver to use for the Amazon ECS container.
603
- log_options: List[str], optional, default None
604
- List of strings containing options for the chosen log driver. The configurable values
605
- depend on the `log driver` chosen. Validation of these options is not supported yet.
606
- Example: [`awslogs-group:aws/batch/job`]
607
- """
608
- ...
609
-
610
- @typing.overload
611
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
612
- ...
613
-
614
- @typing.overload
615
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
616
- ...
617
-
618
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
619
- """
620
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
621
-
622
-
623
- Parameters
624
- ----------
625
- cpu : int, default 1
626
- Number of CPUs required for this step. If `@resources` is
627
- also present, the maximum value from all decorators is used.
628
- gpu : int, default 0
629
- Number of GPUs required for this step. If `@resources` is
630
- also present, the maximum value from all decorators is used.
631
- memory : int, default 4096
632
- Memory size (in MB) required for this step. If
633
- `@resources` is also present, the maximum value from all decorators is
634
- used.
635
- image : str, optional, default None
636
- Docker image to use when launching on AWS Batch. If not specified, and
637
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
638
- not, a default Docker image mapping to the current version of Python is used.
639
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
640
- AWS Batch Job Queue to submit the job to.
641
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
642
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
643
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
644
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
645
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
646
- shared_memory : int, optional, default None
647
- The value for the size (in MiB) of the /dev/shm volume for this step.
648
- This parameter maps to the `--shm-size` option in Docker.
649
- max_swap : int, optional, default None
650
- The total amount of swap memory (in MiB) a container can use for this
651
- step. This parameter is translated to the `--memory-swap` option in
652
- Docker where the value is the sum of the container memory plus the
653
- `max_swap` value.
654
- swappiness : int, optional, default None
655
- This allows you to tune memory swappiness behavior for this step.
656
- A swappiness value of 0 causes swapping not to happen unless absolutely
657
- necessary. A swappiness value of 100 causes pages to be swapped very
658
- aggressively. Accepted values are whole numbers between 0 and 100.
659
- aws_batch_tags: Dict[str, str], optional, default None
660
- Sets arbitrary AWS tags on the AWS Batch compute environment.
661
- Set as string key-value pairs.
662
- use_tmpfs : bool, default False
663
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
664
- not available on Fargate compute environments
665
- tmpfs_tempdir : bool, default True
666
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
667
- tmpfs_size : int, optional, default None
668
- The value for the size (in MiB) of the tmpfs mount for this step.
669
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
670
- memory allocated for this step.
671
- tmpfs_path : str, optional, default None
672
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
673
- inferentia : int, default 0
674
- Number of Inferentia chips required for this step.
675
- trainium : int, default None
676
- Alias for inferentia. Use only one of the two.
677
- efa : int, default 0
678
- Number of elastic fabric adapter network devices to attach to container
679
- ephemeral_storage : int, default None
680
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
681
- This is only relevant for Fargate compute environments
682
- log_driver: str, optional, default None
683
- The log driver to use for the Amazon ECS container.
684
- log_options: List[str], optional, default None
685
- List of strings containing options for the chosen log driver. The configurable values
686
- depend on the `log driver` chosen. Validation of these options is not supported yet.
687
- Example: [`awslogs-group:aws/batch/job`]
688
- """
689
- ...
690
-
691
594
  @typing.overload
692
595
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
693
596
  """
@@ -768,53 +671,189 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
768
671
  ...
769
672
 
770
673
  @typing.overload
771
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
674
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
675
+ """
676
+ Specifies the Conda environment for the step.
677
+
678
+ Information in this decorator will augment any
679
+ attributes set in the `@conda_base` flow-level decorator. Hence,
680
+ you can use `@conda_base` to set packages required by all
681
+ steps and use `@conda` to specify step-specific overrides.
682
+
683
+
684
+ Parameters
685
+ ----------
686
+ packages : Dict[str, str], default {}
687
+ Packages to use for this step. The key is the name of the package
688
+ and the value is the version to use.
689
+ libraries : Dict[str, str], default {}
690
+ Supported for backward compatibility. When used with packages, packages will take precedence.
691
+ python : str, optional, default None
692
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
693
+ that the version used will correspond to the version of the Python interpreter used to start the run.
694
+ disabled : bool, default False
695
+ If set to True, disables @conda.
696
+ """
697
+ ...
698
+
699
+ @typing.overload
700
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
701
+ ...
702
+
703
+ @typing.overload
704
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
705
+ ...
706
+
707
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
708
+ """
709
+ Specifies the Conda environment for the step.
710
+
711
+ Information in this decorator will augment any
712
+ attributes set in the `@conda_base` flow-level decorator. Hence,
713
+ you can use `@conda_base` to set packages required by all
714
+ steps and use `@conda` to specify step-specific overrides.
715
+
716
+
717
+ Parameters
718
+ ----------
719
+ packages : Dict[str, str], default {}
720
+ Packages to use for this step. The key is the name of the package
721
+ and the value is the version to use.
722
+ libraries : Dict[str, str], default {}
723
+ Supported for backward compatibility. When used with packages, packages will take precedence.
724
+ python : str, optional, default None
725
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
726
+ that the version used will correspond to the version of the Python interpreter used to start the run.
727
+ disabled : bool, default False
728
+ If set to True, disables @conda.
729
+ """
730
+ ...
731
+
732
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
733
+ """
734
+ Specifies that this step should execute on Kubernetes.
735
+
736
+
737
+ Parameters
738
+ ----------
739
+ cpu : int, default 1
740
+ Number of CPUs required for this step. If `@resources` is
741
+ also present, the maximum value from all decorators is used.
742
+ memory : int, default 4096
743
+ Memory size (in MB) required for this step. If
744
+ `@resources` is also present, the maximum value from all decorators is
745
+ used.
746
+ disk : int, default 10240
747
+ Disk size (in MB) required for this step. If
748
+ `@resources` is also present, the maximum value from all decorators is
749
+ used.
750
+ image : str, optional, default None
751
+ Docker image to use when launching on Kubernetes. If not specified, and
752
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
753
+ not, a default Docker image mapping to the current version of Python is used.
754
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
755
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
756
+ image_pull_secrets: List[str], default []
757
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
758
+ Kubernetes image pull secrets to use when pulling container images
759
+ in Kubernetes.
760
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
761
+ Kubernetes service account to use when launching pod in Kubernetes.
762
+ secrets : List[str], optional, default None
763
+ Kubernetes secrets to use when launching pod in Kubernetes. These
764
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
765
+ in Metaflow configuration.
766
+ node_selector: Union[Dict[str,str], str], optional, default None
767
+ Kubernetes node selector(s) to apply to the pod running the task.
768
+ Can be passed in as a comma separated string of values e.g.
769
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
770
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
771
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
772
+ Kubernetes namespace to use when launching pod in Kubernetes.
773
+ gpu : int, optional, default None
774
+ Number of GPUs required for this step. A value of zero implies that
775
+ the scheduled node should not have GPUs.
776
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
777
+ The vendor of the GPUs to be used for this step.
778
+ tolerations : List[Dict[str,str]], default []
779
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
780
+ Kubernetes tolerations to use when launching pod in Kubernetes.
781
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
782
+ Kubernetes labels to use when launching pod in Kubernetes.
783
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
784
+ Kubernetes annotations to use when launching pod in Kubernetes.
785
+ use_tmpfs : bool, default False
786
+ This enables an explicit tmpfs mount for this step.
787
+ tmpfs_tempdir : bool, default True
788
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
789
+ tmpfs_size : int, optional, default: None
790
+ The value for the size (in MiB) of the tmpfs mount for this step.
791
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
792
+ memory allocated for this step.
793
+ tmpfs_path : str, optional, default /metaflow_temp
794
+ Path to tmpfs mount for this step.
795
+ persistent_volume_claims : Dict[str, str], optional, default None
796
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
797
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
798
+ shared_memory: int, optional
799
+ Shared memory size (in MiB) required for this step
800
+ port: int, optional
801
+ Port number to specify in the Kubernetes job object
802
+ compute_pool : str, optional, default None
803
+ Compute pool to be used for for this step.
804
+ If not specified, any accessible compute pool within the perimeter is used.
805
+ hostname_resolution_timeout: int, default 10 * 60
806
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
807
+ Only applicable when @parallel is used.
808
+ qos: str, default: Burstable
809
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
810
+
811
+ security_context: Dict[str, Any], optional, default None
812
+ Container security context. Applies to the task container. Allows the following keys:
813
+ - privileged: bool, optional, default None
814
+ - allow_privilege_escalation: bool, optional, default None
815
+ - run_as_user: int, optional, default None
816
+ - run_as_group: int, optional, default None
817
+ - run_as_non_root: bool, optional, default None
818
+ """
819
+ ...
820
+
821
+ @typing.overload
822
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
823
  """
773
- Specifies that the step will success under all circumstances.
774
-
775
- The decorator will create an optional artifact, specified by `var`, which
776
- contains the exception raised. You can use it to detect the presence
777
- of errors, indicating that all happy-path artifacts produced by the step
778
- are missing.
824
+ Specifies secrets to be retrieved and injected as environment variables prior to
825
+ the execution of a step.
779
826
 
780
827
 
781
828
  Parameters
782
829
  ----------
783
- var : str, optional, default None
784
- Name of the artifact in which to store the caught exception.
785
- If not specified, the exception is not stored.
786
- print_exception : bool, default True
787
- Determines whether or not the exception is printed to
788
- stdout when caught.
830
+ sources : List[Union[str, Dict[str, Any]]], default: []
831
+ List of secret specs, defining how the secrets are to be retrieved
832
+ role : str, optional, default: None
833
+ Role to use for fetching secrets
789
834
  """
790
835
  ...
791
836
 
792
837
  @typing.overload
793
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
838
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
794
839
  ...
795
840
 
796
841
  @typing.overload
797
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
842
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
798
843
  ...
799
844
 
800
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
845
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
801
846
  """
802
- Specifies that the step will success under all circumstances.
803
-
804
- The decorator will create an optional artifact, specified by `var`, which
805
- contains the exception raised. You can use it to detect the presence
806
- of errors, indicating that all happy-path artifacts produced by the step
807
- are missing.
847
+ Specifies secrets to be retrieved and injected as environment variables prior to
848
+ the execution of a step.
808
849
 
809
850
 
810
851
  Parameters
811
852
  ----------
812
- var : str, optional, default None
813
- Name of the artifact in which to store the caught exception.
814
- If not specified, the exception is not stored.
815
- print_exception : bool, default True
816
- Determines whether or not the exception is printed to
817
- stdout when caught.
853
+ sources : List[Union[str, Dict[str, Any]]], default: []
854
+ List of secret specs, defining how the secrets are to be retrieved
855
+ role : str, optional, default: None
856
+ Role to use for fetching secrets
818
857
  """
819
858
  ...
820
859
 
@@ -852,137 +891,147 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
852
891
  ...
853
892
 
854
893
  @typing.overload
855
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
856
- """
857
- Specifies secrets to be retrieved and injected as environment variables prior to
858
- the execution of a step.
859
-
860
-
861
- Parameters
862
- ----------
863
- sources : List[Union[str, Dict[str, Any]]], default: []
864
- List of secret specs, defining how the secrets are to be retrieved
865
- role : str, optional, default: None
866
- Role to use for fetching secrets
867
- """
868
- ...
869
-
870
- @typing.overload
871
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
872
- ...
873
-
874
- @typing.overload
875
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
876
- ...
877
-
878
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
879
- """
880
- Specifies secrets to be retrieved and injected as environment variables prior to
881
- the execution of a step.
882
-
883
-
884
- Parameters
885
- ----------
886
- sources : List[Union[str, Dict[str, Any]]], default: []
887
- List of secret specs, defining how the secrets are to be retrieved
888
- role : str, optional, default: None
889
- Role to use for fetching secrets
890
- """
891
- ...
892
-
893
- @typing.overload
894
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
894
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
895
895
  """
896
- Specifies the event(s) that this flow depends on.
896
+ Specifies the flow(s) that this flow depends on.
897
897
 
898
898
  ```
899
- @trigger(event='foo')
899
+ @trigger_on_finish(flow='FooFlow')
900
900
  ```
901
901
  or
902
902
  ```
903
- @trigger(events=['foo', 'bar'])
903
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
904
904
  ```
905
+ This decorator respects the @project decorator and triggers the flow
906
+ when upstream runs within the same namespace complete successfully
905
907
 
906
- Additionally, you can specify the parameter mappings
907
- to map event payload to Metaflow parameters for the flow.
908
+ Additionally, you can specify project aware upstream flow dependencies
909
+ by specifying the fully qualified project_flow_name.
908
910
  ```
909
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
911
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
910
912
  ```
911
913
  or
912
914
  ```
913
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
914
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
915
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
915
916
  ```
916
917
 
917
- 'parameters' can also be a list of strings and tuples like so:
918
- ```
919
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
920
- ```
921
- This is equivalent to:
918
+ You can also specify just the project or project branch (other values will be
919
+ inferred from the current project or project branch):
922
920
  ```
923
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
921
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
924
922
  ```
925
923
 
924
+ Note that `branch` is typically one of:
925
+ - `prod`
926
+ - `user.bob`
927
+ - `test.my_experiment`
928
+ - `prod.staging`
929
+
926
930
 
927
931
  Parameters
928
932
  ----------
929
- event : Union[str, Dict[str, Any]], optional, default None
930
- Event dependency for this flow.
931
- events : List[Union[str, Dict[str, Any]]], default []
932
- Events dependency for this flow.
933
+ flow : Union[str, Dict[str, str]], optional, default None
934
+ Upstream flow dependency for this flow.
935
+ flows : List[Union[str, Dict[str, str]]], default []
936
+ Upstream flow dependencies for this flow.
933
937
  options : Dict[str, Any], default {}
934
938
  Backend-specific configuration for tuning eventing behavior.
935
939
  """
936
940
  ...
937
941
 
938
942
  @typing.overload
939
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
943
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
940
944
  ...
941
945
 
942
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
946
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
943
947
  """
944
- Specifies the event(s) that this flow depends on.
948
+ Specifies the flow(s) that this flow depends on.
945
949
 
946
950
  ```
947
- @trigger(event='foo')
951
+ @trigger_on_finish(flow='FooFlow')
948
952
  ```
949
953
  or
950
954
  ```
951
- @trigger(events=['foo', 'bar'])
955
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
952
956
  ```
957
+ This decorator respects the @project decorator and triggers the flow
958
+ when upstream runs within the same namespace complete successfully
953
959
 
954
- Additionally, you can specify the parameter mappings
955
- to map event payload to Metaflow parameters for the flow.
960
+ Additionally, you can specify project aware upstream flow dependencies
961
+ by specifying the fully qualified project_flow_name.
956
962
  ```
957
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
963
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
958
964
  ```
959
965
  or
960
966
  ```
961
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
962
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
967
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
963
968
  ```
964
969
 
965
- 'parameters' can also be a list of strings and tuples like so:
966
- ```
967
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
968
- ```
969
- This is equivalent to:
970
+ You can also specify just the project or project branch (other values will be
971
+ inferred from the current project or project branch):
970
972
  ```
971
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
973
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
972
974
  ```
973
975
 
976
+ Note that `branch` is typically one of:
977
+ - `prod`
978
+ - `user.bob`
979
+ - `test.my_experiment`
980
+ - `prod.staging`
981
+
974
982
 
975
983
  Parameters
976
984
  ----------
977
- event : Union[str, Dict[str, Any]], optional, default None
978
- Event dependency for this flow.
979
- events : List[Union[str, Dict[str, Any]]], default []
980
- Events dependency for this flow.
985
+ flow : Union[str, Dict[str, str]], optional, default None
986
+ Upstream flow dependency for this flow.
987
+ flows : List[Union[str, Dict[str, str]]], default []
988
+ Upstream flow dependencies for this flow.
981
989
  options : Dict[str, Any], default {}
982
990
  Backend-specific configuration for tuning eventing behavior.
983
991
  """
984
992
  ...
985
993
 
994
+ @typing.overload
995
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
996
+ """
997
+ Specifies the PyPI packages for all steps of the flow.
998
+
999
+ Use `@pypi_base` to set common packages required by all
1000
+ steps and use `@pypi` to specify step-specific overrides.
1001
+
1002
+ Parameters
1003
+ ----------
1004
+ packages : Dict[str, str], default: {}
1005
+ Packages to use for this flow. The key is the name of the package
1006
+ and the value is the version to use.
1007
+ python : str, optional, default: None
1008
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1009
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1010
+ """
1011
+ ...
1012
+
1013
+ @typing.overload
1014
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1015
+ ...
1016
+
1017
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1018
+ """
1019
+ Specifies the PyPI packages for all steps of the flow.
1020
+
1021
+ Use `@pypi_base` to set common packages required by all
1022
+ steps and use `@pypi` to specify step-specific overrides.
1023
+
1024
+ Parameters
1025
+ ----------
1026
+ packages : Dict[str, str], default: {}
1027
+ Packages to use for this flow. The key is the name of the package
1028
+ and the value is the version to use.
1029
+ python : str, optional, default: None
1030
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1031
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1032
+ """
1033
+ ...
1034
+
986
1035
  @typing.overload
987
1036
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
988
1037
  """
@@ -1034,44 +1083,38 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1034
1083
  """
1035
1084
  ...
1036
1085
 
1037
- @typing.overload
1038
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1086
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1039
1087
  """
1040
- Specifies the PyPI packages for all steps of the flow.
1088
+ Specifies what flows belong to the same project.
1089
+
1090
+ A project-specific namespace is created for all flows that
1091
+ use the same `@project(name)`.
1041
1092
 
1042
- Use `@pypi_base` to set common packages required by all
1043
- steps and use `@pypi` to specify step-specific overrides.
1044
1093
 
1045
1094
  Parameters
1046
1095
  ----------
1047
- packages : Dict[str, str], default: {}
1048
- Packages to use for this flow. The key is the name of the package
1049
- and the value is the version to use.
1050
- python : str, optional, default: None
1051
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1052
- that the version used will correspond to the version of the Python interpreter used to start the run.
1053
- """
1054
- ...
1055
-
1056
- @typing.overload
1057
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1058
- ...
1059
-
1060
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1061
- """
1062
- Specifies the PyPI packages for all steps of the flow.
1096
+ name : str
1097
+ Project name. Make sure that the name is unique amongst all
1098
+ projects that use the same production scheduler. The name may
1099
+ contain only lowercase alphanumeric characters and underscores.
1063
1100
 
1064
- Use `@pypi_base` to set common packages required by all
1065
- steps and use `@pypi` to specify step-specific overrides.
1101
+ branch : Optional[str], default None
1102
+ The branch to use. If not specified, the branch is set to
1103
+ `user.<username>` unless `production` is set to `True`. This can
1104
+ also be set on the command line using `--branch` as a top-level option.
1105
+ It is an error to specify `branch` in the decorator and on the command line.
1066
1106
 
1067
- Parameters
1068
- ----------
1069
- packages : Dict[str, str], default: {}
1070
- Packages to use for this flow. The key is the name of the package
1071
- and the value is the version to use.
1072
- python : str, optional, default: None
1073
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1074
- that the version used will correspond to the version of the Python interpreter used to start the run.
1107
+ production : bool, default False
1108
+ Whether or not the branch is the production branch. This can also be set on the
1109
+ command line using `--production` as a top-level option. It is an error to specify
1110
+ `production` in the decorator and on the command line.
1111
+ The project branch name will be:
1112
+ - if `branch` is specified:
1113
+ - if `production` is True: `prod.<branch>`
1114
+ - if `production` is False: `test.<branch>`
1115
+ - if `branch` is not specified:
1116
+ - if `production` is True: `prod`
1117
+ - if `production` is False: `user.<username>`
1075
1118
  """
1076
1119
  ...
1077
1120
 
@@ -1118,6 +1161,99 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1118
1161
  """
1119
1162
  ...
1120
1163
 
1164
+ @typing.overload
1165
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1166
+ """
1167
+ Specifies the event(s) that this flow depends on.
1168
+
1169
+ ```
1170
+ @trigger(event='foo')
1171
+ ```
1172
+ or
1173
+ ```
1174
+ @trigger(events=['foo', 'bar'])
1175
+ ```
1176
+
1177
+ Additionally, you can specify the parameter mappings
1178
+ to map event payload to Metaflow parameters for the flow.
1179
+ ```
1180
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1181
+ ```
1182
+ or
1183
+ ```
1184
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1185
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1186
+ ```
1187
+
1188
+ 'parameters' can also be a list of strings and tuples like so:
1189
+ ```
1190
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1191
+ ```
1192
+ This is equivalent to:
1193
+ ```
1194
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1195
+ ```
1196
+
1197
+
1198
+ Parameters
1199
+ ----------
1200
+ event : Union[str, Dict[str, Any]], optional, default None
1201
+ Event dependency for this flow.
1202
+ events : List[Union[str, Dict[str, Any]]], default []
1203
+ Events dependency for this flow.
1204
+ options : Dict[str, Any], default {}
1205
+ Backend-specific configuration for tuning eventing behavior.
1206
+ """
1207
+ ...
1208
+
1209
+ @typing.overload
1210
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1211
+ ...
1212
+
1213
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1214
+ """
1215
+ Specifies the event(s) that this flow depends on.
1216
+
1217
+ ```
1218
+ @trigger(event='foo')
1219
+ ```
1220
+ or
1221
+ ```
1222
+ @trigger(events=['foo', 'bar'])
1223
+ ```
1224
+
1225
+ Additionally, you can specify the parameter mappings
1226
+ to map event payload to Metaflow parameters for the flow.
1227
+ ```
1228
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1229
+ ```
1230
+ or
1231
+ ```
1232
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1233
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1234
+ ```
1235
+
1236
+ 'parameters' can also be a list of strings and tuples like so:
1237
+ ```
1238
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1239
+ ```
1240
+ This is equivalent to:
1241
+ ```
1242
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1243
+ ```
1244
+
1245
+
1246
+ Parameters
1247
+ ----------
1248
+ event : Union[str, Dict[str, Any]], optional, default None
1249
+ Event dependency for this flow.
1250
+ events : List[Union[str, Dict[str, Any]]], default []
1251
+ Events dependency for this flow.
1252
+ options : Dict[str, Any], default {}
1253
+ Backend-specific configuration for tuning eventing behavior.
1254
+ """
1255
+ ...
1256
+
1121
1257
  @typing.overload
1122
1258
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1123
1259
  """
@@ -1212,139 +1348,3 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1212
1348
  """
1213
1349
  ...
1214
1350
 
1215
- @typing.overload
1216
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1217
- """
1218
- Specifies the flow(s) that this flow depends on.
1219
-
1220
- ```
1221
- @trigger_on_finish(flow='FooFlow')
1222
- ```
1223
- or
1224
- ```
1225
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1226
- ```
1227
- This decorator respects the @project decorator and triggers the flow
1228
- when upstream runs within the same namespace complete successfully
1229
-
1230
- Additionally, you can specify project aware upstream flow dependencies
1231
- by specifying the fully qualified project_flow_name.
1232
- ```
1233
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1234
- ```
1235
- or
1236
- ```
1237
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1238
- ```
1239
-
1240
- You can also specify just the project or project branch (other values will be
1241
- inferred from the current project or project branch):
1242
- ```
1243
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1244
- ```
1245
-
1246
- Note that `branch` is typically one of:
1247
- - `prod`
1248
- - `user.bob`
1249
- - `test.my_experiment`
1250
- - `prod.staging`
1251
-
1252
-
1253
- Parameters
1254
- ----------
1255
- flow : Union[str, Dict[str, str]], optional, default None
1256
- Upstream flow dependency for this flow.
1257
- flows : List[Union[str, Dict[str, str]]], default []
1258
- Upstream flow dependencies for this flow.
1259
- options : Dict[str, Any], default {}
1260
- Backend-specific configuration for tuning eventing behavior.
1261
- """
1262
- ...
1263
-
1264
- @typing.overload
1265
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1266
- ...
1267
-
1268
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1269
- """
1270
- Specifies the flow(s) that this flow depends on.
1271
-
1272
- ```
1273
- @trigger_on_finish(flow='FooFlow')
1274
- ```
1275
- or
1276
- ```
1277
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1278
- ```
1279
- This decorator respects the @project decorator and triggers the flow
1280
- when upstream runs within the same namespace complete successfully
1281
-
1282
- Additionally, you can specify project aware upstream flow dependencies
1283
- by specifying the fully qualified project_flow_name.
1284
- ```
1285
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1286
- ```
1287
- or
1288
- ```
1289
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1290
- ```
1291
-
1292
- You can also specify just the project or project branch (other values will be
1293
- inferred from the current project or project branch):
1294
- ```
1295
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1296
- ```
1297
-
1298
- Note that `branch` is typically one of:
1299
- - `prod`
1300
- - `user.bob`
1301
- - `test.my_experiment`
1302
- - `prod.staging`
1303
-
1304
-
1305
- Parameters
1306
- ----------
1307
- flow : Union[str, Dict[str, str]], optional, default None
1308
- Upstream flow dependency for this flow.
1309
- flows : List[Union[str, Dict[str, str]]], default []
1310
- Upstream flow dependencies for this flow.
1311
- options : Dict[str, Any], default {}
1312
- Backend-specific configuration for tuning eventing behavior.
1313
- """
1314
- ...
1315
-
1316
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1317
- """
1318
- Specifies what flows belong to the same project.
1319
-
1320
- A project-specific namespace is created for all flows that
1321
- use the same `@project(name)`.
1322
-
1323
-
1324
- Parameters
1325
- ----------
1326
- name : str
1327
- Project name. Make sure that the name is unique amongst all
1328
- projects that use the same production scheduler. The name may
1329
- contain only lowercase alphanumeric characters and underscores.
1330
-
1331
- branch : Optional[str], default None
1332
- The branch to use. If not specified, the branch is set to
1333
- `user.<username>` unless `production` is set to `True`. This can
1334
- also be set on the command line using `--branch` as a top-level option.
1335
- It is an error to specify `branch` in the decorator and on the command line.
1336
-
1337
- production : bool, default False
1338
- Whether or not the branch is the production branch. This can also be set on the
1339
- command line using `--production` as a top-level option. It is an error to specify
1340
- `production` in the decorator and on the command line.
1341
- The project branch name will be:
1342
- - if `branch` is specified:
1343
- - if `production` is True: `prod.<branch>`
1344
- - if `production` is False: `test.<branch>`
1345
- - if `branch` is not specified:
1346
- - if `production` is True: `prod`
1347
- - if `production` is False: `user.<username>`
1348
- """
1349
- ...
1350
-