metaflow-stubs 2.11.7__py2.py3-none-any.whl → 2.11.9__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in that registry.
- metaflow-stubs/__init__.pyi +423 -423
- metaflow-stubs/cards.pyi +5 -5
- metaflow-stubs/cli.pyi +3 -3
- metaflow-stubs/client/__init__.pyi +3 -3
- metaflow-stubs/client/core.pyi +7 -7
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/metadata/metadata.pyi +3 -3
- metaflow-stubs/metadata/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +5 -5
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +3 -3
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +4 -4
- metaflow-stubs/plugins/datatools/__init__.pyi +4 -4
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +3 -3
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +3 -3
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +4 -4
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.11.7.dist-info → metaflow_stubs-2.11.9.dist-info}/METADATA +2 -2
- metaflow_stubs-2.11.9.dist-info/RECORD +132 -0
- metaflow_stubs-2.11.7.dist-info/RECORD +0 -132
- {metaflow_stubs-2.11.7.dist-info → metaflow_stubs-2.11.9.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.11.7.dist-info → metaflow_stubs-2.11.9.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
````diff
@@ -1,7 +1,7 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.11.7 #
-# Generated on 2024-03-
+# MF version: 2.11.9 #
+# Generated on 2024-03-29T22:28:00.911417 #
 ##################################################################################

 from __future__ import annotations
````
````diff
@@ -9,14 +9,14 @@ from __future__ import annotations
 import typing
 if typing.TYPE_CHECKING:
     import metaflow.datastore.inputs
+    import metaflow.plugins.datatools.s3.s3
     import metaflow.metaflow_current
-    import metaflow.parameters
     import typing
-    import metaflow.client.core
     import io
     import metaflow.events
+    import metaflow.parameters
     import datetime
-    import metaflow.
+    import metaflow.client.core
     import metaflow._vendor.click.types
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
````
````diff
@@ -726,79 +726,53 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that you may add multiple `@card` decorators in a step with different parameters.

     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that you may add multiple `@card` decorators in a step with different parameters.

     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, default 0
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

````
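For orientation, the `@card` decorator documented in the new stub attaches a human-readable report to a step. A minimal usage sketch, assuming standard Metaflow usage (the flow name and artifact below are illustrative, not taken from the diff):

```python
from metaflow import FlowSpec, card, step

class CardDemoFlow(FlowSpec):

    @card(type="default", timeout=45)  # render a default card once the step finishes
    @step
    def start(self):
        self.answer = 42  # artifacts such as this appear in the generated card
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardDemoFlow()
```

The card is produced after the step completes and can then be inspected with Metaflow's `card view` command.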
````diff
@@ -834,53 +808,112 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     ...

 @typing.overload
-def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    """
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    """
+    ...
+
+@typing.overload
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).

+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the resources needed when executing this step.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

````
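The `@secrets` and `@resources` blocks added above pair naturally: one injects credentials as environment variables, the other declares compute requirements that take effect under `--with batch` or `--with kubernetes`. A hedged sketch (the secret source name and environment variable are illustrative):

```python
import os

from metaflow import FlowSpec, resources, secrets, step

class TrainFlow(FlowSpec):

    @secrets(sources=["db-credentials"])   # illustrative secret spec
    @resources(cpu=2, memory=8192, gpu=1)  # honored by @batch / @kubernetes
    @step
    def start(self):
        # @secrets injects the retrieved values as environment variables
        print("user:", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```

Run locally with `python train_flow.py run`, or route the same code through a compute layer with `python train_flow.py run --with kubernetes`.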
````diff
@@ -1032,108 +1065,153 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the PyPI packages for the step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies that the step will success under all circumstances.
+    Specifies the PyPI packages for the step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies the Conda environment for the step.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+
+    Parameters
+    ----------
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

````
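The reordered `@retry` and `@catch` stubs above compose exactly as the docstrings describe: retries absorb transient failures, and `@catch` runs a no-op task once retries are exhausted so the flow can continue. A minimal sketch (the flaky helper is a stand-in, not from the diff):

```python
import random

from metaflow import FlowSpec, catch, retry, step

def call_flaky_service():
    # stand-in for an operation prone to transient failures (illustrative)
    if random.random() < 0.5:
        raise ConnectionError("transient failure")
    return "ok"

class RobustFlow(FlowSpec):

    @catch(var="start_error")                   # store the exception if all retries fail
    @retry(times=3, minutes_between_retries=2)  # absorb transient errors first
    @step
    def start(self):
        self.result = call_flaky_service()
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "start_error", None):
            print("start failed after retries:", self.start_error)

if __name__ == "__main__":
    RobustFlow()
```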
````diff
@@ -1195,137 +1273,59 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     ...

 @typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-
-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    """
-    ...
-
-@typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the PyPI packages for the step.
+    Specifies the Conda environment for the step.

     Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default: {}
+    packages : Dict[str, str], default {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-    python : str, optional, default: None
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the PyPI packages for the step.
+    Specifies the Conda environment for the step.

     Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default: {}
+    packages : Dict[str, str], default {}
         Packages to use for this step. The key is the name of the package
         and the value is the version to use.
-    python : str, optional, default: None
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
         Version of Python to use, e.g. '3.7.4'. A default value of None implies
         that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

````
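As the relocated `@conda` docstring notes, step-level settings augment a flow-level `@conda_base`. A sketch of that layering (package and Python versions are illustrative):

```python
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.10.4", packages={"pandas": "2.1.4"})  # defaults for every step
class CondaFlow(FlowSpec):

    @conda(packages={"scikit-learn": "1.4.0"})  # step-specific addition/override
    @step
    def start(self):
        import pandas
        import sklearn  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaFlow()
```

Executing with `python conda_flow.py --environment=conda run` resolves both environments before any step runs.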
````diff
@@ -1386,52 +1386,116 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...

+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+
+    """
+    ...
+
 @typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
+
+    Parameters
+    ----------
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+
+
+    """
+    ...
+
+@typing.overload
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+    """
+    Specifies the event(s) that this flow depends on.
+
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```

     Parameters
     ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
+

-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

````
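The `@project` and `@trigger` flow-level decorators that moved into this part of the stub work together when a flow is deployed to a production scheduler such as Argo Workflows. A sketch of event-driven deployment (the project, event, and field names below are illustrative):

```python
from metaflow import FlowSpec, Parameter, project, step, trigger

@project(name="fraud_detection")  # one shared namespace for related flows
@trigger(event={"name": "data_updated",
                "parameters": {"window": "payload_window"}})  # event field -> flow parameter
class ScoringFlow(FlowSpec):

    window = Parameter("window", default="1d")

    @step
    def start(self):
        print("triggered with window =", self.window)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```

The trigger only takes effect after deployment, e.g. via `python scoring_flow.py argo-workflows create`; a plain local `run` ignores it.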
````diff
@@ -1474,24 +1538,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...

-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-
-    Parameters
-    ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
-
-
-    """
-    ...
-
 def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
````
````diff
@@ -1534,48 +1580,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
     """
     ...

-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
 @typing.overload
 def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
````
````diff
@@ -1729,97 +1733,93 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     ...

 @typing.overload
-def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.

     Parameters
     ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...

 @typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-                      {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    Specifies the times when the flow should be run when running on a
+    production scheduler.

     Parameters
     ----------
-    event : Union[str, Dict[str, Any]], optional, default None
-        Event dependency for this flow.
-    events : List[Union[str, Dict[str, Any]]], default []
-        Events dependency for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.

+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
     """
     ...

````
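Finally, the relocated `@schedule` stub covers time-based deployments, complementing the event-based `@trigger` above. A short sketch (the cron expression is illustrative):

```python
from metaflow import FlowSpec, schedule, step

@schedule(daily=True)  # or, e.g.: @schedule(cron="0 6 * * *", timezone="Europe/London")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```

As with `@trigger`, the schedule applies only once the flow is deployed to a production scheduler; per the docstring, the `timezone` option is currently honored only on Argo Workflows.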