metaflow-stubs 2.19.0__py2.py3-none-any.whl → 2.19.1__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of metaflow-stubs was flagged as potentially problematic.
- metaflow-stubs/__init__.pyi +583 -583
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/meta_files.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +3 -3
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/packaging_sys/__init__.pyi +5 -5
- metaflow-stubs/packaging_sys/backend.pyi +3 -3
- metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
- metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
- metaflow-stubs/packaging_sys/utils.pyi +2 -2
- metaflow-stubs/packaging_sys/v1.pyi +4 -4
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +13 -13
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +4 -4
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
- metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/parsers.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
- metaflow-stubs/plugins/secrets/utils.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +5 -5
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +4 -4
- metaflow-stubs/user_decorators/__init__.pyi +2 -2
- metaflow-stubs/user_decorators/common.pyi +2 -2
- metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
- metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
- metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
- metaflow-stubs/user_decorators/user_step_decorator.pyi +3 -3
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.19.0.dist-info → metaflow_stubs-2.19.1.dist-info}/METADATA +2 -2
- metaflow_stubs-2.19.1.dist-info/RECORD +168 -0
- metaflow_stubs-2.19.0.dist-info/RECORD +0 -168
- {metaflow_stubs-2.19.0.dist-info → metaflow_stubs-2.19.1.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.19.0.dist-info → metaflow_stubs-2.19.1.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.19.0 #
-# Generated on 2025-10-
+# MF version: 2.19.1 #
+# Generated on 2025-10-28T01:39:06.721172 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -47,8 +47,8 @@ from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.parsers import yaml_parser as yaml_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.parsers import yaml_parser as yaml_parser
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from . import cards as cards
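The reordered imports above are the config-file parsers that metaflow re-exports at the package top level, as the stub shows. A rough usage sketch, assuming the standard `Config` parameter API; the flow, file, and key names here are illustrative:

    from metaflow import Config, FlowSpec, step, yaml_parser

    class ConfigDemoFlow(FlowSpec):
        # Parse a YAML file into a Config object when the flow is deployed/run.
        app_config = Config("app_config", default="app.yaml", parser=yaml_parser)

        @step
        def start(self):
            # Parsed config values are available on the Config object.
            print(self.app_config)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ConfigDemoFlow()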
@@ -155,131 +155,136 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
+def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies the resources needed when executing this step.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
+def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
     """
-    Specifies the Conda environment for the step.
+    Specifies the resources needed when executing this step.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

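The relocated `@retry` and `@resources` stubs above document the decorator parameters. A minimal sketch of how the two combine on a step, per those docstrings; the flow, step, and helper names are illustrative:

    import random

    from metaflow import FlowSpec, resources, retry, step

    def flaky_computation():
        # Stand-in for an operation that can fail transiently (e.g. a network call).
        if random.random() < 0.2:
            raise ConnectionError("transient failure")
        return 42

    class RobustFlow(FlowSpec):

        @retry(times=3, minutes_between_retries=2)  # re-run the task on transient errors
        @resources(cpu=2, memory=8192)              # requirements independent of the compute layer
        @step
        def start(self):
            self.result = flaky_computation()
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        RobustFlow()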
@@ -335,81 +340,157 @@ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generat
     ...

 @typing.overload
-def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    aws_batch_tags: Dict[str, str], optional, default None
+        Sets arbitrary AWS tags on the AWS Batch compute environment.
+        Set as string key-value pairs.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
+def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    aws_batch_tags: Dict[str, str], optional, default None
+        Sets arbitrary AWS tags on the AWS Batch compute environment.
+        Set as string key-value pairs.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

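Per the `@batch` docstring above, explicit decorator arguments override the METAFLOW_BATCH_* configuration defaults, and cpu, gpu, and memory are reconciled with any `@resources` decorator by taking the maximum. A hedged sketch; the queue name and sizes are illustrative:

    from metaflow import FlowSpec, batch, retry, step

    class BatchTrainFlow(FlowSpec):

        @batch(cpu=4, memory=16384, queue="my-batch-queue")  # runs this step on AWS Batch
        @retry(times=2)                                      # retry the Batch task on failure
        @step
        def start(self):
            self.model = "trained"  # placeholder for the actual work done remotely
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        BatchTrainFlow()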
@@ -503,35 +584,109 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: st
     ...

 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
+def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
+    """
+    ...
+
+@typing.overload
+def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+    """
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
     """
     ...

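The `@secrets` and `@card` stubs above describe secret injection via environment variables and per-step report cards. An illustrative sketch; the secret source and environment variable names are hypothetical, and the card content uses the `metaflow.cards` components:

    import os

    from metaflow import FlowSpec, card, current, secrets, step
    from metaflow.cards import Markdown

    class ReportFlow(FlowSpec):

        @secrets(sources=["my-secret-source"])  # secret spec name is hypothetical
        @card(type="default", timeout=45)
        @step
        def start(self):
            # @secrets injects the secret's keys as environment variables.
            token = os.environ.get("API_TOKEN")  # variable name is illustrative
            current.card.append(Markdown("# Run report\nToken present: %s" % bool(token)))
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ReportFlow()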
@@ -595,300 +750,279 @@ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_gene
|
|
|
595
750
|
...
|
|
596
751
|
|
|
597
752
|
@typing.overload
|
|
598
|
-
def
|
|
753
|
+
def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
|
|
599
754
|
"""
|
|
600
|
-
Specifies
|
|
755
|
+
Specifies the Conda environment for the step.
|
|
756
|
+
|
|
757
|
+
Information in this decorator will augment any
|
|
758
|
+
attributes set in the `@conda_base` flow-level decorator. Hence,
|
|
759
|
+
you can use `@conda_base` to set packages required by all
|
|
760
|
+
steps and use `@conda` to specify step-specific overrides.
|
|
601
761
|
|
|
602
762
|
|
|
603
763
|
Parameters
|
|
604
764
|
----------
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
image : str, optional, default None
|
|
616
|
-
Docker image to use when launching on AWS Batch. If not specified, and
|
|
617
|
-
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
618
|
-
not, a default Docker image mapping to the current version of Python is used.
|
|
619
|
-
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
620
|
-
AWS Batch Job Queue to submit the job to.
|
|
621
|
-
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
622
|
-
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
623
|
-
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
624
|
-
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
625
|
-
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
626
|
-
shared_memory : int, optional, default None
|
|
627
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
628
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
629
|
-
max_swap : int, optional, default None
|
|
630
|
-
The total amount of swap memory (in MiB) a container can use for this
|
|
631
|
-
step. This parameter is translated to the `--memory-swap` option in
|
|
632
|
-
Docker where the value is the sum of the container memory plus the
|
|
633
|
-
`max_swap` value.
|
|
634
|
-
swappiness : int, optional, default None
|
|
635
|
-
This allows you to tune memory swappiness behavior for this step.
|
|
636
|
-
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
637
|
-
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
638
|
-
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
639
|
-
aws_batch_tags: Dict[str, str], optional, default None
|
|
640
|
-
Sets arbitrary AWS tags on the AWS Batch compute environment.
|
|
641
|
-
Set as string key-value pairs.
|
|
642
|
-
use_tmpfs : bool, default False
|
|
643
|
-
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
644
|
-
not available on Fargate compute environments
|
|
645
|
-
tmpfs_tempdir : bool, default True
|
|
646
|
-
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
647
|
-
tmpfs_size : int, optional, default None
|
|
648
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
649
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
650
|
-
memory allocated for this step.
|
|
651
|
-
tmpfs_path : str, optional, default None
|
|
652
|
-
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
653
|
-
inferentia : int, default 0
|
|
654
|
-
Number of Inferentia chips required for this step.
|
|
655
|
-
trainium : int, default None
|
|
656
|
-
Alias for inferentia. Use only one of the two.
|
|
657
|
-
efa : int, default 0
|
|
658
|
-
Number of elastic fabric adapter network devices to attach to container
|
|
659
|
-
ephemeral_storage : int, default None
|
|
660
|
-
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
661
|
-
This is only relevant for Fargate compute environments
|
|
662
|
-
log_driver: str, optional, default None
|
|
663
|
-
The log driver to use for the Amazon ECS container.
|
|
664
|
-
log_options: List[str], optional, default None
|
|
665
|
-
List of strings containing options for the chosen log driver. The configurable values
|
|
666
|
-
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
667
|
-
Example: [`awslogs-group:aws/batch/job`]
|
|
765
|
+
packages : Dict[str, str], default {}
|
|
766
|
+
Packages to use for this step. The key is the name of the package
|
|
767
|
+
and the value is the version to use.
|
|
768
|
+
libraries : Dict[str, str], default {}
|
|
769
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
770
|
+
python : str, optional, default None
|
|
771
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
772
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
773
|
+
disabled : bool, default False
|
|
774
|
+
If set to True, disables @conda.
|
|
668
775
|
"""
|
|
669
776
|
...
|
|
670
777
|
|
|
671
778
|
@typing.overload
|
|
672
|
-
def
|
|
779
|
+
def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
673
780
|
...
|
|
674
781
|
|
|
675
782
|
@typing.overload
|
|
676
|
-
def
|
|
783
|
+
def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
677
784
|
...
|
|
678
785
|
|
|
679
|
-
def
|
|
786
|
+
def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
|
|
680
787
|
"""
|
|
681
|
-
Specifies
|
|
788
|
+
Specifies the Conda environment for the step.
|
|
789
|
+
|
|
790
|
+
Information in this decorator will augment any
|
|
791
|
+
attributes set in the `@conda_base` flow-level decorator. Hence,
|
|
792
|
+
you can use `@conda_base` to set packages required by all
|
|
793
|
+
steps and use `@conda` to specify step-specific overrides.
|
|
682
794
|
|
|
683
795
|
|
|
684
796
|
Parameters
|
|
685
797
|
----------
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
image : str, optional, default None
|
|
697
|
-
Docker image to use when launching on AWS Batch. If not specified, and
|
|
698
|
-
METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
|
|
699
|
-
not, a default Docker image mapping to the current version of Python is used.
|
|
700
|
-
queue : str, default METAFLOW_BATCH_JOB_QUEUE
|
|
701
|
-
AWS Batch Job Queue to submit the job to.
|
|
702
|
-
iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
|
|
703
|
-
AWS IAM role that AWS Batch container uses to access AWS cloud resources.
|
|
704
|
-
execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
|
|
705
|
-
AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
|
|
706
|
-
(https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
|
|
707
|
-
shared_memory : int, optional, default None
|
|
708
|
-
The value for the size (in MiB) of the /dev/shm volume for this step.
|
|
709
|
-
This parameter maps to the `--shm-size` option in Docker.
|
|
710
|
-
max_swap : int, optional, default None
|
|
711
|
-
The total amount of swap memory (in MiB) a container can use for this
|
|
712
|
-
step. This parameter is translated to the `--memory-swap` option in
|
|
713
|
-
Docker where the value is the sum of the container memory plus the
|
|
714
|
-
`max_swap` value.
|
|
715
|
-
swappiness : int, optional, default None
|
|
716
|
-
This allows you to tune memory swappiness behavior for this step.
|
|
717
|
-
A swappiness value of 0 causes swapping not to happen unless absolutely
|
|
718
|
-
necessary. A swappiness value of 100 causes pages to be swapped very
|
|
719
|
-
aggressively. Accepted values are whole numbers between 0 and 100.
|
|
720
|
-
aws_batch_tags: Dict[str, str], optional, default None
|
|
721
|
-
Sets arbitrary AWS tags on the AWS Batch compute environment.
|
|
722
|
-
Set as string key-value pairs.
|
|
723
|
-
use_tmpfs : bool, default False
|
|
724
|
-
This enables an explicit tmpfs mount for this step. Note that tmpfs is
|
|
725
|
-
not available on Fargate compute environments
|
|
726
|
-
tmpfs_tempdir : bool, default True
|
|
727
|
-
sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
|
|
728
|
-
tmpfs_size : int, optional, default None
|
|
729
|
-
The value for the size (in MiB) of the tmpfs mount for this step.
|
|
730
|
-
This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
|
|
731
|
-
memory allocated for this step.
|
|
732
|
-
tmpfs_path : str, optional, default None
|
|
733
|
-
Path to tmpfs mount for this step. Defaults to /metaflow_temp.
|
|
734
|
-
inferentia : int, default 0
|
|
735
|
-
Number of Inferentia chips required for this step.
|
|
736
|
-
trainium : int, default None
|
|
737
|
-
Alias for inferentia. Use only one of the two.
|
|
738
|
-
efa : int, default 0
|
|
739
|
-
Number of elastic fabric adapter network devices to attach to container
|
|
740
|
-
ephemeral_storage : int, default None
|
|
741
|
-
The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
|
|
742
|
-
This is only relevant for Fargate compute environments
|
|
743
|
-
log_driver: str, optional, default None
|
|
744
|
-
The log driver to use for the Amazon ECS container.
|
|
745
|
-
log_options: List[str], optional, default None
|
|
746
|
-
List of strings containing options for the chosen log driver. The configurable values
|
|
747
|
-
depend on the `log driver` chosen. Validation of these options is not supported yet.
|
|
748
|
-
Example: [`awslogs-group:aws/batch/job`]
|
|
798
|
+
packages : Dict[str, str], default {}
|
|
799
|
+
Packages to use for this step. The key is the name of the package
|
|
800
|
+
and the value is the version to use.
|
|
801
|
+
libraries : Dict[str, str], default {}
|
|
802
|
+
Supported for backward compatibility. When used with packages, packages will take precedence.
|
|
803
|
+
python : str, optional, default None
|
|
804
|
+
Version of Python to use, e.g. '3.7.4'. A default value of None implies
|
|
805
|
+
that the version used will correspond to the version of the Python interpreter used to start the run.
|
|
806
|
+
disabled : bool, default False
|
|
807
|
+
If set to True, disables @conda.
|
|
749
808
|
"""
|
|
750
809
|
...
|
|
751
810
|
|
|
752
811
|
@typing.overload
|
|
753
|
-
def
|
|
812
|
+
def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
|
|
754
813
|
"""
|
|
755
|
-
Specifies
|
|
756
|
-
to a step needs to be retried.
|
|
757
|
-
|
|
758
|
-
This decorator is useful for handling transient errors, such as networking issues.
|
|
759
|
-
If your task contains operations that can't be retried safely, e.g. database updates,
|
|
760
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
|
761
|
-
|
|
762
|
-
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
|
763
|
-
decorator will execute a no-op task after all retries have been exhausted,
|
|
764
|
-
ensuring that the flow execution can continue.
|
|
814
|
+
Specifies environment variables to be set prior to the execution of a step.
|
|
765
815
|
|
|
766
816
|
|
|
767
817
|
Parameters
|
|
768
818
|
----------
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
minutes_between_retries : int, default 2
|
|
772
|
-
Number of minutes between retries.
|
|
819
|
+
vars : Dict[str, str], default {}
|
|
820
|
+
Dictionary of environment variables to set.
|
|
773
821
|
"""
|
|
774
822
|
...
|
|
775
823
|
|
|
776
824
|
@typing.overload
|
|
777
|
-
def
|
|
825
|
+
def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
|
|
778
826
|
...
|
|
779
827
|
|
|
780
828
|
@typing.overload
|
|
781
|
-
def
|
|
829
|
+
def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
|
|
782
830
|
...
|
|
783
831
|
|
|
784
|
-
def
|
|
832
|
+
def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
|
|
785
833
|
"""
|
|
786
|
-
Specifies
|
|
787
|
-
to a step needs to be retried.
|
|
788
|
-
|
|
789
|
-
This decorator is useful for handling transient errors, such as networking issues.
|
|
790
|
-
If your task contains operations that can't be retried safely, e.g. database updates,
|
|
791
|
-
it is advisable to annotate it with `@retry(times=0)`.
|
|
792
|
-
|
|
793
|
-
This can be used in conjunction with the `@catch` decorator. The `@catch`
|
|
794
|
-
decorator will execute a no-op task after all retries have been exhausted,
|
|
795
|
-
ensuring that the flow execution can continue.
|
|
834
|
+
Specifies environment variables to be set prior to the execution of a step.
|
|
796
835
|
|
|
797
836
|
|
|
798
837
|
Parameters
|
|
799
838
|
----------
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
minutes_between_retries : int, default 2
|
|
803
|
-
Number of minutes between retries.
|
|
839
|
+
vars : Dict[str, str], default {}
|
|
840
|
+
Dictionary of environment variables to set.
|
|
804
841
|
"""
|
|
805
842
|
...
|
|
806
843
|
|
|
 @typing.overload
-def
+def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
     """
-
+    Specifies that the step will succeed under all circumstances.
 
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
     """
-
+    Specifies that the step will succeed under all circumstances.
 
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
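Per the restored docstring, `@catch` turns a failing step into a recoverable one. A minimal sketch with a deliberately failing step; the artifact name `compute_failed` is illustrative:

```python
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):

    # If the step raises, @catch stores the exception in the
    # 'compute_failed' artifact and lets the flow continue.
    @catch(var="compute_failed")
    @step
    def start(self):
        self.result = 1 // 0  # raises ZeroDivisionError at runtime
        self.next(self.end)

    @step
    def end(self):
        # Downstream code can test the artifact to detect the failure;
        # happy-path artifacts (here, self.result) will be missing.
        if getattr(self, "compute_failed", None):
            print("start step failed:", self.compute_failed)


if __name__ == "__main__":
    CatchDemoFlow()
```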
+@typing.overload
+def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
+
+
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
-
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
 @typing.overload
-def
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies
-
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
 
 
     Parameters
     ----------
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
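The `@trigger` examples above map event payload fields onto flow parameters. A runnable sketch under those rules; the event name `data_refreshed` and its `table_name` field are hypothetical:

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Maps the 'table_name' field of the 'data_refreshed' event payload
# onto the flow's 'table' parameter (names are hypothetical).
@trigger(event={"name": "data_refreshed", "parameters": {"table": "table_name"}})
class TriggeredFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("refreshing table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```

The decorator typically takes effect once the flow is deployed to an event-capable production scheduler such as Argo Workflows; local runs simply ignore it.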
 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
     ...
 
 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
     """
-    Specifies
-    the execution of a step.
+    Specifies the PyPI packages for all steps of the flow.
 
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
    """
     ...
 
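A short sketch of `@pypi_base` as documented above; the package pin and Python version are placeholders:

```python
from metaflow import FlowSpec, pypi_base, step


# Versions are illustrative; pin whatever your steps actually need.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.9")
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PandasFlow()
```

Note that the resolved package environment is only used when the run is started with `--environment=pypi`.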
@@ -935,97 +1069,38 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
     """
     ...
 
-
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    Specifies which flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+            - if `branch` is specified:
+                - if `production` is True: `prod.<branch>`
+                - if `production` is False: `test.<branch>`
+            - if `branch` is not specified:
+                - if `production` is True: `prod`
+                - if `production` is False: `user.<username>`
     """
     ...
 
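A minimal sketch of the newly stubbed `@project` decorator; the project name is hypothetical:

```python
from metaflow import FlowSpec, project, step


@project(name="fraud_detection")  # hypothetical project name
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```

Run as-is, the flow publishes under the `user.<username>` branch; deployed with `--production` the branch becomes `prod`, following the rules listed in the docstring.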
@@ -1130,38 +1205,54 @@ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: t
     """
     ...
 
-
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
 
 
     Parameters
     ----------
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
 
-    branch : Optional[str], default None
-        The branch to use. If not specified, the branch is set to
-        `user.<username>` unless `production` is set to `True`. This can
-        also be set on the command line using `--branch` as a top-level option.
-        It is an error to specify `branch` in the decorator and on the command line.
 
-
-
-
-
-
-
-
-
-
-
-
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...
 
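A sketch of the `@schedule` decorator restored above; the cron expression and timezone are illustrative:

```python
from metaflow import FlowSpec, schedule, step


# Illustrative cron: every day at 02:30 in the given IANA timezone
# (5-field Argo-style expression; EventBridge uses a 6-field variant).
@schedule(cron="30 2 * * *", timezone="Europe/Stockholm")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```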
@@ -1216,137 +1307,46 @@ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typi
     """
     ...
 
-
-def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
+    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as flow decorators. Adding more than one decorator will ensure that the `start` step
+    starts only after all sensors finish.
 
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+    wildcard_match : bool
+        Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        A reference to the S3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
     """
     ...
 
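Based on the parameters documented above, a sketch of gating a flow's `start` step on an S3 key; the bucket, key, pool, and connection id values are placeholders, and the stub lists every argument as keyword-only, so all are passed explicitly here. The flow is then compiled for Airflow with `airflow create`, as the docstring notes.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# The sensor delays `start` until s3://my-bucket/daily/data.csv exists.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=True,
    pool="default_pool",          # placeholder Airflow slot pool
    soft_fail=False,
    name="wait_for_daily_drop",
    description="Wait for the daily S3 data drop",
    bucket_key="s3://my-bucket/daily/data.csv",  # full s3:// url, so
    bucket_name=None,                            # bucket_name stays None
    wildcard_match=False,
    aws_conn_id="aws_default",    # placeholder Airflow connection id
    verify=None,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```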