metaflow-stubs 2.15.12__py2.py3-none-any.whl → 2.15.14__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +372 -372
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +22 -22
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +9 -9
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +30 -30
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +5 -5
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +5 -5
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.15.12.dist-info → metaflow_stubs-2.15.14.dist-info}/METADATA +2 -2
- metaflow_stubs-2.15.14.dist-info/RECORD +149 -0
- {metaflow_stubs-2.15.12.dist-info → metaflow_stubs-2.15.14.dist-info}/WHEEL +1 -1
- metaflow_stubs-2.15.12.dist-info/RECORD +0 -149
- {metaflow_stubs-2.15.12.dist-info → metaflow_stubs-2.15.14.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.
-# Generated on 2025-05-
+# MF version: 2.15.14 #
+# Generated on 2025-05-21T14:01:03.779738 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import events as events
 from . import tuple_util as tuple_util
+from . import events as events
 from . import metaflow_git as metaflow_git
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from . import cards as cards
 from . import client as client
 from .client.core import namespace as namespace
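For context on the pypi parsers whose re-export order changes above: they are intended to be used as `Config` parsers so that a dependency file can drive a flow's environment. A minimal sketch, assuming the `Config(..., parser=...)` pattern; the flow name and the `requirements.txt` path are hypothetical, not part of this diff:

from metaflow import Config, FlowSpec, step, requirements_txt_parser

class ParsedDepsFlow(FlowSpec):
    # Parse a requirements.txt file into a config value at deploy time (illustrative).
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        print(self.deps)  # the parsed dependency specification
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ParsedDepsFlow()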
@@ -147,86 +147,57 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
 
-
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-
-
-
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
-    ...
-
-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
-    """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
-    Parameters
-    ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-    """
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
 
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
 
-
-
-
-        List of secret specs, defining how the secrets are to be retrieved
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
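The hunk above relocates the `@retry` stubs. As a usage sketch of the parameters they document (the flow and its flaky helper are hypothetical, not part of this package):

import random

from metaflow import FlowSpec, step, retry

def flaky_fetch():
    # Stand-in for a network call that fails transiently (hypothetical helper).
    if random.random() < 0.3:
        raise ConnectionError("transient failure")
    return {"rows": 100}

class RetryDemoFlow(FlowSpec):
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        self.data = flaky_fetch()
        self.next(self.end)

    @retry(times=0)  # per the docstring: opt out when a step is not safe to retry
    @step
    def end(self):
        print(self.data)

if __name__ == "__main__":
    RetryDemoFlow()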
@@ -523,6 +494,137 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
+@typing.overload
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+    """
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
 @typing.overload
 def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
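The block added above moves the `@resources`, `@parallel`, and `@environment` stubs to this position. A short sketch of how `@resources` and `@environment` compose on a step (the flow name and resource numbers are arbitrary, hypothetical choices):

from metaflow import FlowSpec, step, resources, environment

class TrainFlow(FlowSpec):
    @environment(vars={"OMP_NUM_THREADS": "4"})
    @resources(cpu=4, memory=16384, gpu=1, shared_memory=1024)
    @step
    def start(self):
        import os
        print(os.environ["OMP_NUM_THREADS"])  # set by @environment before the step runs
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()

As the docstring notes, the same flow can then be sent to a compute layer at run time, e.g. `python trainflow.py run --with batch` or `--with kubernetes`.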
@@ -575,21 +677,37 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...
 
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
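The `@secrets` stubs added above inject secrets as environment variables before a step runs. A hedged sketch — the secret source name and the injected key below are hypothetical and depend entirely on the configured secrets provider:

import os

from metaflow import FlowSpec, step, secrets

class SecretsDemoFlow(FlowSpec):
    @secrets(sources=["my-db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        # Keys of the resolved secret appear as environment variables in the step.
        print("password available:", "DB_PASSWORD" in os.environ)  # hypothetical key
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()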
@@ -653,169 +771,51 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...
 
 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
     """
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
    """
     ...
 
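This hunk moves the `@card` stubs here. A small sketch of the documented parameters, including two cards on one step addressed by `id` (the flow name is hypothetical; `Markdown` comes from `metaflow.cards`):

from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):
    @card  # default card, generated automatically from the task
    @card(type="blank", id="notes", timeout=45)  # second card, selected by id
    @step
    def start(self):
        current.card["notes"].append(Markdown("# Run notes"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardDemoFlow()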
@@ -870,92 +870,6 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
-    """
-    ...
-
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
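The Airflow sensors removed above reappear later in the file; the surviving context here is the `@trigger_on_finish` flow decorator. A sketch of its documented use (flow names are hypothetical; the trigger takes effect only once the flow is deployed to a production orchestrator such as Argo Workflows):

from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="UpstreamFlow")  # run after UpstreamFlow completes successfully
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()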
@@ -1057,54 +971,46 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...
 
-
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the Conda environment for all steps of the flow.
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables Conda.
-    """
-    ...
-
-@typing.overload
-def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    Use `@conda_base` to set common libraries required by all
-    steps and use `@conda` to specify step-specific additions.
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...
 
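Per the docstring above, this sensor only applies when the flow is compiled with `airflow create`. A hedged sketch passing a subset of the parameters (the sensor name and DAG id are hypothetical; unspecified parameters fall back to the defaults listed in the docstring):

from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    name="wait_for_upstream",
    description="Block start until upstream_dag succeeds",
    external_dag_id="upstream_dag",
)
class AfterUpstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    AfterUpstreamFlow()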
@@ -1149,6 +1055,49 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...
 
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    """
+    ...
+
 @typing.overload
 def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
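Similarly, a hedged sketch of the S3 key sensor documented above (the bucket and key are hypothetical; per the docstring, when a full s3:// URL is given, `bucket_name` stays unset):

from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    name="wait_for_input",
    description="Wait for the daily input file",
    bucket_key="s3://example-bucket/daily/input.csv",
)
class S3GatedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()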
@@ -1242,6 +1191,57 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...
 
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
 def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     Specifies what flows belong to the same project.
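The `@schedule` stubs added above control when a deployed flow runs. A sketch of the cron form (the expression and timezone are hypothetical; per the docstring, `timezone` is currently honored only on Argo Workflows):

from metaflow import FlowSpec, schedule, step

@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()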
@@ -1278,53 +1278,53 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
     ...
 
 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
 
 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
-
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
 
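Finally, the relocated `@conda_base` stubs pair with the step-level `@conda` decorator shown earlier in the file. A sketch of the flow-level/step-level split (the flow name and version pins are hypothetical):

from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.10.13", packages={"pandas": "2.2.1"})
class CondaDemoFlow(FlowSpec):
    @conda(packages={"scikit-learn": "1.4.2"})  # step-specific addition on top of the base
    @step
    def start(self):
        import pandas  # resolved from the flow-level Conda environment
        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()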