metaflow-stubs 2.12.31__py2.py3-none-any.whl → 2.12.33__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +247 -247
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +5 -5
- metaflow-stubs/mflog/__init__.pyi +2 -2
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +11 -11
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -12
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +2 -2
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +30 -30
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.33.dist-info}/METADATA +2 -2
- metaflow_stubs-2.12.33.dist-info/RECORD +158 -0
- metaflow_stubs-2.12.31.dist-info/RECORD +0 -158
- {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.33.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.33.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.31 #
-# Generated on 2024-11-
+# MF version: 2.12.33 #
+# Generated on 2024-11-27T23:43:46.597947 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -138,60 +138,66 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...
 
 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...
 
 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-    """
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...
 
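The hunk above swaps the old `@conda`-oriented text at this file position for the `@card` and `@timeout` stubs. As a quick illustration of the semantics the new docstrings describe (timeout units are additive; a timeout surfaces as a step exception that `@retry` and `@catch` then handle), here is a minimal sketch of a flow; the flow, step, and variable names are invented for illustration:

```python
from metaflow import FlowSpec, card, catch, retry, step, timeout

class ReportFlow(FlowSpec):  # hypothetical flow name
    @card(type='default')          # render a Metaflow Card when the step completes
    @timeout(hours=1, minutes=1)   # additive units: effective timeout is 1h 1m
    @retry(times=2)                # a timeout raises in the step, triggering retries...
    @catch(var='failure')          # ...and @catch absorbs the exception after retries
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        # 'failure' is None when start succeeded, or holds the caught exception info
        print('failure was:', self.failure)

if __name__ == '__main__':
    ReportFlow()
```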
@@ -227,80 +233,72 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...
 
 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
+    Specifies the resources needed when executing this step.
 
-
-
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
 
-
-
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...
 
 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
+    Specifies the resources needed when executing this step.
 
-
-
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
 
-
-
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...
 
 @typing.overload
-def
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...
 
 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...
 
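The `@resources` docstring introduced above decouples resource requests from the compute layer. A small sketch (invented flow name) of how that reads in practice, with the compute layer picked at run time exactly as the docstring's CLI examples show:

```python
from metaflow import FlowSpec, resources, step

class TrainFlow(FlowSpec):  # hypothetical flow name
    # Declare requirements once; they apply whether the run is local,
    # `run --with batch`, or `run --with kubernetes`.
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TrainFlow()
```

Running `python trainflow.py run --with batch` would then honor these requests on AWS Batch, per the docstring.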
@@ -325,6 +323,12 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     """
     ...
 
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+    """
+    ...
+
 @typing.overload
 def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
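The `@kubernetes` stub added above follows the same pattern as `@batch`. A one-line usage sketch; the flow name and the resource values are illustrative, not defaults beyond those in the signature:

```python
from metaflow import FlowSpec, kubernetes, step

class K8sFlow(FlowSpec):  # hypothetical flow name
    @kubernetes(cpu=2, memory=8192, disk=20480)  # overrides of the stub's defaults
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    K8sFlow()
```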
@@ -349,27 +353,103 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the PyPI packages for the step.
 
-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-
+    Specifies the PyPI packages for the step.
 
-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+    """
+    ...
+
+@typing.overload
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+    """
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+    """
+    ...
+
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
     """
     ...
 
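The `@pypi` and `@conda` docstrings above describe the same layering rule: a flow-level `*_base` decorator sets a baseline and the step-level decorator augments it. (Note that `@pyi_base` in the quoted docstring is the package's own typo for `@pypi_base`, preserved verbatim.) A sketch of that layering, with a hypothetical flow name and illustrative package pins:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python='3.11.9', packages={'requests': '2.32.3'})  # baseline for every step
class PackagedFlow(FlowSpec):  # hypothetical flow name
    @pypi(packages={'pandas': '2.2.2'})  # step-specific addition on top of the baseline
    @step
    def start(self):
        import pandas    # resolved from the step-specific environment
        import requests  # resolved from the flow-level baseline
        self.next(self.end)

    @step
    def end(self):
        import requests  # only the baseline applies here

if __name__ == '__main__':
    PackagedFlow()
```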
@@ -393,85 +473,127 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.
 
-
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
 
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
     """
     ...
 
 @typing.overload
-def
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.
 
-
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
 
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
     """
     ...
 
-
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
+    Specifies what flows belong to the same project.
 
-
-
-
-
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
     """
     ...
 
 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+    """
     ...
 
 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the PyPI packages for the
+    Specifies the PyPI packages for all steps of the flow.
 
-
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
+    Use `@pypi_base` to set common packages required by all
     steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+    """
+    ...
+
 @typing.overload
 def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
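The relocated `@trigger` docstring above explains event-to-parameter mapping. A compact sketch of the pattern (the event name, payload field, and flow name are invented), which takes effect once the flow is deployed to an event-capable scheduler such as Argo Workflows:

```python
from metaflow import FlowSpec, Parameter, project, step, trigger

@project(name='demo')  # flows sharing name='demo' share a project namespace
@trigger(event={'name': 'data_ready',                      # fire on the 'data_ready' event
                'parameters': {'table': 'table_name'}})    # payload field -> flow parameter
class TriggeredFlow(FlowSpec):  # hypothetical flow name
    table = Parameter('table', default='raw_events')

    @step
    def start(self):
        print('triggered for table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TriggeredFlow()
```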
@@ -495,36 +617,22 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...
 
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-    """
-    ...
-
 @typing.overload
-def
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...
 
 @typing.overload
-def
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...
 
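The `@schedule` stub that replaces the removed sensor block accepts either coarse flags (`hourly`, `daily`, `weekly`) or a cron expression with an optional timezone. A one-liner sketch with illustrative values:

```python
from metaflow import FlowSpec, schedule, step

@schedule(cron='0 6 * * *', timezone='Europe/London')  # hypothetical: daily at 06:00
class NightlyFlow(FlowSpec):  # hypothetical flow name
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```

As with `@trigger`, the schedule is honored when the flow is deployed to a production scheduler, per the docstring.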
@@ -609,111 +717,3 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...
 
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-    """
-    ...
-
-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-    """
-    ...
-
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-    """
-    ...
-
-@typing.overload
-def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-    """
-    ...
-
-@typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-    """
-    ...
-
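Almost every decorator in this stub follows the same three-overload shape: one overload for the parameterized form and one for the bare form applied directly to a step function, plus the untyped implementation. Outside the diff, here is a minimal self-contained sketch of why that shape lets both `@retry` and `@retry(times=5)` type-check; the retry logic is a toy stand-in, not Metaflow's implementation:

```python
import functools
import typing

F = typing.TypeVar('F', bound=typing.Callable[..., typing.Any])

@typing.overload
def retry(f: F) -> F: ...                                     # bare form: @retry
@typing.overload
def retry(*, times: int = 3) -> typing.Callable[[F], F]: ...  # parameterized: @retry(times=5)

def retry(f: typing.Optional[F] = None, *, times: int = 3):
    """Toy stand-in showing the dual-form decorator pattern the stubs encode."""
    def wrap(func: F) -> F:
        @functools.wraps(func)
        def inner(*args: typing.Any, **kwargs: typing.Any) -> typing.Any:
            last_exc: typing.Optional[BaseException] = None
            for _ in range(times + 1):  # one initial attempt plus `times` retries
                try:
                    return func(*args, **kwargs)
                except Exception as exc:  # demo only; real code should narrow this
                    last_exc = exc
            assert last_exc is not None
            raise last_exc
        return typing.cast(F, inner)
    # bare use passes the function in `f`; parameterized use returns the wrapper
    return wrap if f is None else wrap(f)
```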