ob-metaflow-stubs 5.7.1__py2.py3-none-any.whl → 5.8__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +447 -447
- metaflow-stubs/cards.pyi +6 -6
- metaflow-stubs/cli.pyi +3 -3
- metaflow-stubs/client/__init__.pyi +4 -4
- metaflow-stubs/client/core.pyi +7 -7
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/metadata/metadata.pyi +2 -2
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +24 -24
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +4 -4
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +3 -3
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +3 -3
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +3 -3
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +6 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +4 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +4 -4
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +5 -5
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +2 -2
- metaflow-stubs/system/__init__.pyi +4 -4
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +3 -3
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- {ob_metaflow_stubs-5.7.1.dist-info → ob_metaflow_stubs-5.8.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-5.8.dist-info/RECORD +138 -0
- ob_metaflow_stubs-5.7.1.dist-info/RECORD +0 -138
- {ob_metaflow_stubs-5.7.1.dist-info → ob_metaflow_stubs-5.8.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-5.7.1.dist-info → ob_metaflow_stubs-5.8.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,23 +1,23 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-
+# MF version: 2.12.19.1+ob(v1) #
+# Generated on 2024-09-04T22:56:44.982024 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.
-    import metaflow.
-    import metaflow.metaflow_current
+    import metaflow.client.core
+    import metaflow.parameters
     import metaflow.events
+    import metaflow._vendor.click.types
+    import metaflow.runner.metaflow_runner
     import datetime
-    import metaflow.parameters
     import metaflow.flowspec
-    import metaflow.
+    import metaflow.datastore.inputs
     import typing
-    import metaflow.
+    import metaflow.metaflow_current
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
@@ -437,87 +437,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    disk : int, default 10240
-        Disk size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on Kubernetes. If not specified, and
-        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
-        If given, the imagePullPolicy to be applied to the Docker image of the step.
-    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-        Kubernetes service account to use when launching pod in Kubernetes.
-    secrets : List[str], optional, default None
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
 @typing.overload
 def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -592,83 +511,6 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
     """
     ...

-@typing.overload
-def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    """
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
-    """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    """
-    ...
-
 @typing.overload
 def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -727,55 +569,33 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies environment variables to be set prior to the execution of a step.

     Parameters
     ----------
-
-
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies environment variables to be set prior to the execution of a step.

     Parameters
     ----------
-
-
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...
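
For context, this is how the `@environment` decorator documented in this hunk is applied to a step. A minimal sketch based on the `vars: Dict[str, str]` signature above; the flow, step body, and variable names are hypothetical, not part of the package:

```python
from metaflow import FlowSpec, step, environment


class EnvDemoFlow(FlowSpec):
    # Each entry in `vars` is exported as an environment variable
    # before the step body runs, per the docstring above.
    @environment(vars={"MY_SETTING": "demo-value"})
    @step
    def start(self):
        import os
        print(os.environ["MY_SETTING"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```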
@@ -829,66 +649,117 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

-
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies that this step should execute on Kubernetes.

     Parameters
     ----------
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
     """
     ...
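
To illustrate the `@retry` and `@kubernetes` decorators that this hunk moves into place, a minimal sketch built only from the signatures documented above (the flow name and resource values are hypothetical):

```python
from metaflow import FlowSpec, kubernetes, retry, step


class TrainFlow(FlowSpec):
    # Retry transient failures with the documented defaults, and run the
    # step on Kubernetes with explicit cpu/memory requests (values in MB).
    @retry(times=3, minutes_between_retries=2)
    @kubernetes(cpu=2, memory=8192)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainFlow()
```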
@@ -943,6 +814,58 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

+@typing.overload
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    """
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
 @typing.overload
 def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
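
A quick sketch of the `@secrets` decorator added in this hunk, following the `sources` spec documented above. The secret name and environment-variable key are hypothetical placeholders; actual keys depend on the configured secrets backend:

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):
    # Each entry in `sources` is a secret spec; the plain-string form names a
    # secret in the configured backend ("db-credentials" is hypothetical).
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Retrieved secrets are injected as environment variables.
        print("user:", os.environ.get("DB_USER"))  # hypothetical key
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```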
@@ -993,51 +916,265 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-    Use `@
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-    Use `@
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
+@typing.overload
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
+
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+@typing.overload
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+    """
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
+
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
+    """
+    ...
+
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...
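
Tying the step-level `@resources` and flow-level `@trigger` decorators from this hunk together, a minimal sketch; the event name and resource values are hypothetical, and actual event delivery depends on how the flow is deployed (e.g. via `argo-workflows create`):

```python
from metaflow import FlowSpec, resources, step, trigger


@trigger(event="data_refreshed")  # hypothetical event name
class EventDrivenFlow(FlowSpec):
    # Declare requirements independently of the compute layer; pick the layer
    # at run time with `--with batch` or `--with kubernetes`, as documented above.
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```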
@@ -1144,6 +1281,24 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...

+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+    """
+    ...
+
 def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     This decorator is used to run NIM containers in Metaflow tasks as sidecars.
@@ -1214,207 +1369,52 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...

-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-
-    Parameters
-    ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
-    """
-    ...
-
 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
+    Specifies the Conda environment for all steps of the flow.

-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.

     Parameters
     ----------
-
-
-
-
-
-
-
-
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...

 @typing.overload
-def
-    ...
-
-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-
-    Parameters
-    ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
-    production scheduler.
+    Specifies the Conda environment for all steps of the flow.

-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-    Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-    which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
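
Finally, a minimal sketch combining the flow-level `@project` and `@conda_base` decorators from the last two hunks with the step-level `@conda` documented earlier. The project name and package versions below are illustrative placeholders, not values from the package:

```python
from metaflow import FlowSpec, conda, conda_base, project, step


@project(name="stub_demo")                                   # hypothetical project name
@conda_base(python="3.10.4", packages={"pandas": "2.1.0"})   # illustrative versions
class CondaDemoFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.train)

    # Step-specific addition layered on top of the flow-level environment.
    @conda(packages={"scikit-learn": "1.3.0"})               # illustrative version
    @step
    def train(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```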