ob-metaflow-stubs 5.8.2__py2.py3-none-any.whl → 5.9.1__py2.py3-none-any.whl
This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +491 -487
- metaflow-stubs/cards.pyi +5 -5
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +4 -4
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/metadata/metadata.pyi +2 -2
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +27 -25
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +3 -3
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +6 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +7 -7
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +3 -3
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +18 -0
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +6 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +26 -4
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +8 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +3 -3
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +5 -5
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +3 -3
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +5 -5
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +7 -5
- metaflow-stubs/runner/metaflow_runner.pyi +8 -8
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +4 -4
- metaflow-stubs/runner/utils.pyi +5 -3
- metaflow-stubs/system/__init__.pyi +3 -3
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +3 -3
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.1.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-5.9.1.dist-info/RECORD +139 -0
- ob_metaflow_stubs-5.8.2.dist-info/RECORD +0 -138
- {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.1.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-5.8.2.dist-info → ob_metaflow_stubs-5.9.1.dist-info}/top_level.txt +0 -0
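The functional highlight of this stub update is the new `node_selector` parameter on the `@kubernetes` step decorator; the rest of the `__init__.pyi` diff below is regenerated and reordered stubs for existing decorators such as `@resources`, `@retry`, `@parallel`, `@schedule`, the Airflow sensors, `@project`, `@nim`, and `@trigger_on_finish`. As a quick orientation, here is a minimal sketch written against those signatures; the flow and step names are hypothetical and the snippet is illustrative only, not part of the package.

```python
# Illustrative sketch only -- flow/step names are hypothetical.
# Decorator signatures follow the 5.9.1 stubs shown in the diff below,
# including the node_selector parameter added to @kubernetes.
from metaflow import FlowSpec, step, resources, retry, kubernetes


class HelloNodeSelectorFlow(FlowSpec):

    @resources(cpu=2, memory=8192)  # compute-layer independent requirements
    @retry(times=3, minutes_between_retries=2)  # retry transient failures
    @kubernetes(node_selector={"kubernetes.io/arch": "amd64"})  # new in these stubs
    @step
    def start(self):
        self.message = "hello from Kubernetes"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    HelloNodeSelectorFlow()
```

Because these are type stubs, the example only exercises the public signatures; the runtime behavior ships in the `ob-metaflow` package that the stubs describe.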
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,23 +1,23 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-09-
+# MF version: 2.12.20.1+ob(v1) #
+# Generated on 2024-09-16T18:41:26.221357 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.parameters
-    import datetime
     import metaflow._vendor.click.types
-    import metaflow.
-    import metaflow.datastore.inputs
-    import metaflow.flowspec
-    import typing
+    import metaflow.client.core
     import metaflow.runner.metaflow_runner
+    import datetime
     import metaflow.events
-    import metaflow.
+    import metaflow.metaflow_current
+    import typing
+    import metaflow.flowspec
+    import metaflow.parameters
+    import metaflow.datastore.inputs

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -438,21 +438,110 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-
-
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
+    """
+    ...
+
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

@@ -489,6 +578,129 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...

+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Internal decorator to support Fast bakery
+    """
+    ...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Internal decorator to support Fast bakery
+    """
+    ...
+
 @typing.overload
 def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -590,37 +802,59 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...

 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Specifies that this step should execute on Kubernetes.

@@ -649,6 +883,10 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
         Kubernetes secrets to use when launching pod in Kubernetes. These
         secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
         in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+        or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
     namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
         Kubernetes namespace to use when launching pod in Kubernetes.
     gpu : int, optional, default None
@@ -683,128 +921,21 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     ...

 @typing.overload
-def
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

@@ -866,160 +997,153 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...

 @typing.overload
-def
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.

     Parameters
     ----------
-
-
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

 @typing.overload
-def
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the
-
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
-    ...
-
-@typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Internal decorator to support Fast bakery
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

-
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
-
-    steps and use `@conda` to specify step-specific overrides.
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
     """
     ...

-
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.

     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...

-def
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    User code call
-    -----------
-    @nim(
-        models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
-        backend='managed'
-    )
-
-    Valid backend options
-    ---------------------
-    - 'managed': Outerbounds selects a compute provider based on the model.
-    - 🚧 'dataplane': Run in your account.
+    Specifies what flows belong to the same project.

-
-
-    - 'meta/llama3-8b-instruct': 8B parameter model
-    - 'meta/llama3-70b-instruct': 70B parameter model
-    - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.

     Parameters
     ----------
-
-
-
-
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+
     """
     ...

@@ -1062,70 +1186,34 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...

-def
+def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-
-    Parameters
-    ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
+    This decorator is used to run NIM containers in Metaflow tasks as sidecars.

+    User code call
+    -----------
+    @nim(
+        models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+        backend='managed'
+    )

-
-
-
-
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Valid backend options
+    ---------------------
+    - 'managed': Outerbounds selects a compute provider based on the model.
+    - 🚧 'dataplane': Run in your account.

-
-
-
-
-
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Valid model options
+    ----------------
+    - 'meta/llama3-8b-instruct': 8B parameter model
+    - 'meta/llama3-70b-instruct': 70B parameter model
+    - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions

     Parameters
     ----------
-
-
-
-
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    models: list[NIM]
+        List of NIM containers running models in sidecars.
+    backend: str
+        Compute provider to run the NIM container.
     """
     ...

@@ -1178,87 +1266,49 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...

-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
-    """
-    ...
-
 @typing.overload
-def
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.

     ```
-    @
+    @trigger_on_finish(flow='FooFlow')
     ```
     or
     ```
-    @
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
     ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully

-    Additionally, you can specify
-
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
     ```
-    @
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
     ```
     or
     ```
-    @
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
     ```

-
-
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
     ```
-    @
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
     ```

+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
     Parameters
     ----------
-
-
-
-
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.

@@ -1267,47 +1317,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
|
|
1267
1317
|
...
|
1268
1318
|
|
1269
1319
|
@typing.overload
|
1270
|
-
def
|
1320
|
+
def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
|
1271
1321
|
...
|
1272
1322
|
|
1273
|
-
def
|
1323
|
+
def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
|
1274
1324
|
"""
|
1275
|
-
Specifies the
|
1325
|
+
Specifies the flow(s) that this flow depends on.
|
1276
1326
|
|
1277
1327
|
```
|
1278
|
-
@
|
1328
|
+
@trigger_on_finish(flow='FooFlow')
|
1279
1329
|
```
|
1280
1330
|
or
|
1281
1331
|
```
|
1282
|
-
@
|
1332
|
+
@trigger_on_finish(flows=['FooFlow', 'BarFlow'])
|
1283
1333
|
```
|
1334
|
+
This decorator respects the @project decorator and triggers the flow
|
1335
|
+
when upstream runs within the same namespace complete successfully
|
1284
1336
|
|
1285
|
-
Additionally, you can specify
|
1286
|
-
|
1337
|
+
Additionally, you can specify project aware upstream flow dependencies
|
1338
|
+
by specifying the fully qualified project_flow_name.
|
1287
1339
|
```
|
1288
|
-
@
|
1340
|
+
@trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
|
1289
1341
|
```
|
1290
1342
|
or
|
1291
1343
|
```
|
1292
|
-
@
|
1293
|
-
{'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
|
1344
|
+
@trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
|
1294
1345
|
```
|
1295
1346
|
|
1296
|
-
|
1297
|
-
|
1298
|
-
@trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
|
1299
|
-
```
|
1300
|
-
This is equivalent to:
|
1347
|
+
You can also specify just the project or project branch (other values will be
|
1348
|
+
inferred from the current project or project branch):
|
1301
1349
|
```
|
1302
|
-
@
|
1350
|
+
@trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
|
1303
1351
|
```
|
1304
1352
|
|
1353
|
+
Note that `branch` is typically one of:
|
1354
|
+
- `prod`
|
1355
|
+
- `user.bob`
|
1356
|
+
- `test.my_experiment`
|
1357
|
+
- `prod.staging`
|
1358
|
+
|
1305
1359
|
Parameters
|
1306
1360
|
----------
|
1307
|
-
|
1308
|
-
|
1309
|
-
|
1310
|
-
|
1361
|
+
flow : Union[str, Dict[str, str]], optional, default None
|
1362
|
+
Upstream flow dependency for this flow.
|
1363
|
+
flows : List[Union[str, Dict[str, str]]], default []
|
1364
|
+
Upstream flow dependencies for this flow.
|
1311
1365
|
options : Dict[str, Any], default {}
|
1312
1366
|
Backend-specific configuration for tuning eventing behavior.
|
1313
1367
|
|
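For orientation, the `trigger_on_finish` stubs added above describe Metaflow's flow-completion trigger. A minimal sketch of a downstream flow using it follows; the flow names are illustrative (taken from the docstring examples), and the trigger only takes effect once the flow is deployed to an orchestrator such as Argo Workflows:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Hypothetical downstream flow: once deployed (e.g. via `argo-workflows create`),
# it starts automatically when a run of FooFlow in the same namespace succeeds.
@trigger_on_finish(flow='FooFlow')
class BarFlow(FlowSpec):

    @step
    def start(self):
        # Work that depends on FooFlow having finished goes here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    BarFlow()
```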
@@ -1315,91 +1369,45 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...

-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
 @typing.overload
-def
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.

     ```
-    @
+    @trigger(event='foo')
     ```
     or
     ```
-    @
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully

-    Additionally, you can specify
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```

-
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`

     Parameters
     ----------
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.

@@ -1408,51 +1416,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
     ...

 @typing.overload
-def
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.

     ```
-    @
+    @trigger(event='foo')
     ```
     or
     ```
-    @
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully

-    Additionally, you can specify
-
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```

-
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`

     Parameters
     ----------
-
-
-
-
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.

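The updated `trigger` stubs above describe the event-based counterpart: an upstream event, rather than a flow run, starts the deployed flow, and the `parameters` mapping copies event payload fields into flow parameters. A minimal sketch of usage follows; the event name `order_created` and its payload field are illustrative, not part of this package:

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical event-triggered flow: when deployed, a published 'order_created'
# event starts a run and its 'order_id' payload field fills the flow parameter.
@trigger(event={'name': 'order_created', 'parameters': {'order_id': 'order_id'}})
class OrderFlow(FlowSpec):
    order_id = Parameter('order_id', default='unknown')

    @step
    def start(self):
        print('processing order', self.order_id)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    OrderFlow()
```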