metaflow-stubs 2.16.7__py2.py3-none-any.whl → 2.17.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +692 -692
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +35 -35
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +32 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +3 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +32 -32
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +3 -3
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.16.7.dist-info → metaflow_stubs-2.17.0.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.17.0.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.16.7.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.16.7.dist-info → metaflow_stubs-2.17.0.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.16.7.dist-info → metaflow_stubs-2.17.0.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.16.7 #
-# Generated on 2025-07-29T01:50:18.781263 #
+# MF version: 2.17.0 #
+# Generated on 2025-08-06T11:05:04.016056 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
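The header hunk only bumps the version banner and reorders the two imports guarded by `typing.TYPE_CHECKING`, so nothing changes at runtime. For readers unfamiliar with the guard, here is a minimal sketch of the idiom these stubs rely on (the `Run` class and `started_at` attribute are illustrative, not taken from the stub):

```python
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # Seen only by type checkers (mypy, pyright); never executed at
    # runtime, so the stub adds no import cost or import cycles.
    import datetime

class Run:  # illustrative class, not defined in this hunk
    # Resolves lazily thanks to `from __future__ import annotations`.
    started_at: datetime.datetime
```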
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
 from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+from . import tuple_util as tuple_util
 from . import metaflow_git as metaflow_git
 from . import events as events
-from . import tuple_util as tuple_util
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from . import cards as cards
 from . import client as client
 from .client.core import namespace as namespace
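These re-exports only change order, so the public surface is unchanged: `tuple_util`, `S3`, `IncludeFile`, and the three `plugins.pypi.parsers` functions all remain importable from the package root. As a hedged sketch of why the parser re-exports matter, they are typically handed to a `Config` so that a dependency file drives the flow's configuration; the `Config` wiring and the `requirements.txt` file name below are assumptions for illustration, not something this diff shows:

```python
from metaflow import Config, FlowSpec, step
# Re-exported at the package root, as the stub above records.
from metaflow import requirements_txt_parser

class DepsFlow(FlowSpec):
    # Assumed usage: parse a requirements.txt into a structured config value.
    packages = Config("packages", default="requirements.txt",
                      parser=requirements_txt_parser)

    @step
    def start(self):
        print(self.packages)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```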
@@ -153,109 +153,170 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    Specifies the resources needed when executing this step.
 
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
 
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
 
 
     Parameters
     ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...
 
 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-@typing.overload
-def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the resources needed when executing this step.
 
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
 
 
     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...
 
-@typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies that this step should execute on Kubernetes.
 
 
     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g.
+        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[Dict[str,str]], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+        Kubernetes labels to use when launching pod in Kubernetes.
+    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+        Kubernetes annotations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    hostname_resolution_timeout: int, default 10 * 60
+        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+        Only applicable when @parallel is used.
+    qos: str, default: Burstable
+        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
     """
     ...
 
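The net effect of this hunk is a reshuffle: `@secrets`, `@parallel`, and `@card` move elsewhere in the file while `@resources` and the expanded `@kubernetes` signature (note `image_pull_secrets`, `qos`, and `security_context` relative to 2.16.7) now appear here. A brief, hedged sketch of how the two decorators compose, based only on the docstrings above; the flow and step names are invented:

```python
from metaflow import FlowSpec, kubernetes, resources, step

class TrainFlow(FlowSpec):
    # @resources states requirements independently of the compute layer;
    # @kubernetes pins this step to Kubernetes. Per the docstrings above,
    # when both are present the maximum of each value is used.
    @resources(cpu=2, memory=8192)
    @kubernetes(memory=16384, qos="Burstable",
                security_context={"run_as_non_root": True})  # keys per the docstring
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```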
@@ -319,132 +380,200 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...
 
 @typing.overload
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+    """
     ...
 
 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
     """
     ...
 
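The pattern repeats here: `@catch` and `@resources` move out, `@batch` and `@card` move in, with the docstrings themselves unchanged. A minimal sketch combining the two decorators as documented above; the queue name, card content, and step names are invented, and the `current.card` usage follows Metaflow's card API, which this hunk does not show:

```python
from metaflow import FlowSpec, batch, card, current, step
from metaflow.cards import Markdown

class ReportFlow(FlowSpec):
    # cpu/memory follow the @batch docstring; when @resources is also
    # present, the maximum value from all decorators would be used.
    @batch(cpu=4, memory=8192, queue="my-batch-queue")  # queue name invented
    @card(type="default", timeout=45)
    @step
    def start(self):
        # Append to the card created by @card for this task.
        current.card.append(Markdown("# Step finished"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```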
@@ -508,41 +637,132 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...
 
 @typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.
 
 
     Parameters
     ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...
 
 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
+    """
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
+    """
+    ...
+
+@typing.overload
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+
+
+    Parameters
+    ----------
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
 
     This decorator is useful for handling transient errors, such as networking issues.
     If your task contains operations that can't be retried safely, e.g. database updates,
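This hunk relocates four decorators (`@parallel`, `@environment`, `@secrets`, `@retry`) without altering their documented behavior. Since the `@retry` docstring explicitly recommends pairing with `@catch`, here is a hedged sketch of the documented combination; the flow name, secret spec, and environment variable are invented:

```python
from metaflow import FlowSpec, catch, environment, retry, secrets, step

class RobustFlow(FlowSpec):
    # Per the docstrings above: @retry handles transient errors, and
    # @catch executes a no-op task once retries are exhausted so the
    # flow can continue; @environment and @secrets populate env vars.
    @environment(vars={"MODE": "nightly"})
    @secrets(sources=["my-secret-source"])  # invented secret spec
    @retry(times=3, minutes_between_retries=2)
    @catch(var="train_error", print_exception=True)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RobustFlow()
```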
@@ -614,528 +834,88 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
614
834
  ...
615
835
 
616
836
  @typing.overload
617
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
837
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[Dict[str,str]], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ Specifies that the step will succeed under all circumstances.
 
- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
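
To make the new `@catch` docstring concrete, here is a minimal usage sketch; the flow, step, and artifact names are illustrative, not part of the stubs:

```
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):
    # Illustrative flow: `start` fails on purpose so that @catch stores
    # the raised exception in the artifact named by `var`.

    @catch(var='compute_failure', print_exception=False)
    @step
    def start(self):
        raise ValueError('simulated failure')
        self.result = 42        # happy-path artifact, never produced here
        self.next(self.end)     # the transition is still discovered statically

    @step
    def end(self):
        # Per the docstring, the exception artifact signals that the
        # happy-path artifact `result` is missing.
        if getattr(self, 'compute_failure', None):
            print('start failed with:', self.compute_failure)


if __name__ == '__main__':
    CatchDemoFlow()
```
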
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
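
A short sketch of `@project` under the branch rules spelled out above; the project name is hypothetical:

```
from metaflow import FlowSpec, project, step


@project(name='demo_project')  # hypothetical project name
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ProjectDemoFlow()
```

By those rules, a plain run lands on the `user.<username>` branch, `--branch feature_x` on `test.feature_x`, and `--production` on `prod`.
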
 
@@ -1342,3 +1122,223 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
 
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
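
Since every argument in this stub is keyword-only, attaching the sensor looks roughly like the sketch below. The bucket, key, and sensor names are made up, and the decorator only has an effect when the flow is compiled with `airflow create`:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_input_file',                        # hypothetical sensor name
    description='Wait for the daily input drop',
    bucket_key='s3://example-bucket/daily/input.csv',  # full s3:// url,
    bucket_name=None,                                  # so bucket_name stays None
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    S3SensorFlow()
```
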
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
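
A minimal sketch of `@pypi_base`; the package pins and Python version are illustrative:

```
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={'pandas': '2.2.2'}, python='3.11.5')  # illustrative pins
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.rows)


if __name__ == '__main__':
    PypiDemoFlow()
```
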
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
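
And a corresponding sketch for the external-task sensor; the DAG and task ids are hypothetical, and again this only matters when the flow is compiled with `airflow create`:

```
import datetime

from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode='reschedule',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_upstream_etl',          # hypothetical sensor name
    description='Block until the upstream DAG task succeeds',
    external_dag_id='upstream_etl',        # hypothetical DAG id
    external_task_ids=['publish'],         # hypothetical task id
    allowed_states=['success'],
    failed_states=['failed'],
    execution_delta=datetime.timedelta(hours=1),
    check_existence=True,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    DownstreamFlow()
```
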
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
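
Finally, a hedged sketch of the event-to-parameter mapping described in the `@trigger` docstring. The event name and payload field are hypothetical, and the decorator takes effect when the flow is deployed to an event-aware scheduler such as Argo Workflows:

```
from metaflow import FlowSpec, Parameter, step, trigger


# Map the payload field `event_field` of event `foo` onto `flow_param`.
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class TriggeredFlow(FlowSpec):

    flow_param = Parameter('flow_param', default='unset')

    @step
    def start(self):
        print('triggered with flow_param =', self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    TriggeredFlow()
```
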