metaflow-stubs 2.15.13__py2.py3-none-any.whl → 2.15.14__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +472 -472
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +7 -7
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +3 -3
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +4 -2
- metaflow-stubs/metaflow_current.pyi +6 -6
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +11 -11
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +3 -3
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +5 -5
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +5 -5
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +7 -7
- metaflow-stubs/user_configs/config_options.pyi +4 -4
- metaflow-stubs/user_configs/config_parameters.pyi +5 -5
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/METADATA +2 -2
- metaflow_stubs-2.15.14.dist-info/RECORD +149 -0
- metaflow_stubs-2.15.13.dist-info/RECORD +0 -149
- {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.
-# Generated on 2025-05-
+# MF version: 2.15.14 #
+# Generated on 2025-05-21T14:01:03.779738 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -36,16 +36,16 @@ from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
 from . import tuple_util as tuple_util
-from . import metaflow_git as metaflow_git
 from . import events as events
+from . import metaflow_git as metaflow_git
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from . import cards as cards
 from . import client as client
 from .client.core import namespace as namespace
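The two hunks above only touch the generated banner and the ordering of top-level re-exports; nothing is added to or removed from the public surface. A minimal sketch of user code consuming these re-exports, unaffected by the reordering (the flow and file names are hypothetical):

```python
# Hypothetical usage sketch: the reordered re-exports above keep the same
# public surface, so top-level imports like these resolve either way.
from metaflow import FlowSpec, IncludeFile, step


class HelloFlow(FlowSpec):
    # IncludeFile is re-exported at the package top level, as the stub shows.
    data = IncludeFile("data", help="A local file to version with the run")

    @step
    def start(self):
        print("data bytes:", len(self.data or ""))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HelloFlow()
```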
@@ -147,37 +147,142 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
-
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-
-
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+
+
+    Parameters
+    ----------
+    cpu : int, default 1
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    memory : int, default 4096
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    disk : int, default 10240
+        Disk size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on Kubernetes. If not specified, and
+        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+        If given, the imagePullPolicy to be applied to the Docker image of the step.
+    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+        Kubernetes service account to use when launching pod in Kubernetes.
+    secrets : List[str], optional, default None
+        Kubernetes secrets to use when launching pod in Kubernetes. These
+        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+        in Metaflow configuration.
+    node_selector: Union[Dict[str,str], str], optional, default None
+        Kubernetes node selector(s) to apply to the pod running the task.
+        Can be passed in as a comma separated string of values e.g.
+        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+        Kubernetes namespace to use when launching pod in Kubernetes.
+    gpu : int, optional, default None
+        Number of GPUs required for this step. A value of zero implies that
+        the scheduled node should not have GPUs.
+    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+        The vendor of the GPUs to be used for this step.
+    tolerations : List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+        Kubernetes tolerations to use when launching pod in Kubernetes.
+    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+        Kubernetes labels to use when launching pod in Kubernetes.
+    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+        Kubernetes annotations to use when launching pod in Kubernetes.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step.
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default: None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default /metaflow_temp
+        Path to tmpfs mount for this step.
+    persistent_volume_claims : Dict[str, str], optional, default None
+        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+    shared_memory: int, optional
+        Shared memory size (in MiB) required for this step
+    port: int, optional
+        Port number to specify in the Kubernetes job object
+    compute_pool : str, optional, default None
+        Compute pool to be used for for this step.
+        If not specified, any accessible compute pool within the perimeter is used.
+    hostname_resolution_timeout: int, default 10 * 60
+        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+        Only applicable when @parallel is used.
+    qos: str, default: Burstable
+        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+    security_context: Dict[str, Any], optional, default None
+        Container security context. Applies to the task container. Allows the following keys:
+        - privileged: bool, optional, default None
+        - allow_privilege_escalation: bool, optional, default None
+        - run_as_user: int, optional, default None
+        - run_as_group: int, optional, default None
+        - run_as_non_root: bool, optional, default None
     """
     ...
 
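The hunk above restores the full `@retry` overloads and adds the `@kubernetes` signature at this position in the stub. A minimal sketch of how these two decorators compose on a step, assuming a configured Kubernetes compute backend; the flow, the resource sizes, and the page-fetching helper are illustrative, not part of the package:

```python
import urllib.request

from metaflow import FlowSpec, kubernetes, retry, step


def fetch_all_pages():
    # Hypothetical helper: any network call here may fail transiently,
    # which is exactly the case @retry is documented to handle.
    with urllib.request.urlopen("https://example.com") as resp:
        return [resp.read()]


class ScrapeFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)  # stub defaults, written out
    @kubernetes(cpu=2, memory=8192, disk=20480)  # sizes in MB per the docstring
    @step
    def start(self):
        # Each attempt runs in its own Kubernetes pod; transient failures
        # are retried up to 3 times before the task is marked failed.
        self.pages = fetch_all_pages()
        self.next(self.end)

    @step
    def end(self):
        print(f"fetched {len(self.pages)} pages")


if __name__ == "__main__":
    ScrapeFlow()
```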
@@ -389,57 +494,6 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
-@typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
 @typing.overload
 def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
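This hunk only relocates the `@catch` block within the generated stub (it reappears verbatim later in the file, around line 823 of the new version) and leaves the `@resources` declaration untouched. For reference, a small sketch combining the two as their docstrings describe; the flow, resource numbers, and stand-in training body are illustrative:

```python
from metaflow import FlowSpec, catch, resources, step


class TrainFlow(FlowSpec):

    @catch(var="train_error", print_exception=True)
    @resources(cpu=4, memory=16384)
    @step
    def start(self):
        # Per the @catch docstring, an exception raised here is stored in
        # the `train_error` artifact instead of failing the whole flow.
        self.model = {"weights": [0.0] * 10}  # stand-in for real training
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "train_error", None):
            print("training failed:", self.train_error)
        else:
            print("trained model with", len(self.model["weights"]), "weights")


if __name__ == "__main__":
    TrainFlow()
```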
@@ -520,253 +574,199 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...
 
 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-
-
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
     ...
 
-
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Specifies environment variables to be set prior to the execution of a step.
 
 
     Parameters
     ----------
-
-
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...
 
 @typing.overload
-def
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-
-
-
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
-
+    ...
+
+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    node_selector: Union[Dict[str,str], str], optional, default None
-        Kubernetes node selector(s) to apply to the pod running the task.
-        Can be passed in as a comma separated string of values e.g.
-        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
-        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
-        Kubernetes labels to use when launching pod in Kubernetes.
-    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
-        Kubernetes annotations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
-    hostname_resolution_timeout: int, default 10 * 60
-        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
-        Only applicable when @parallel is used.
-    qos: str, default: Burstable
-        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for the step.
 
-
-
-
-
-
-
-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...
 
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-
-
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-
-
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    to a step needs to be retried.
+    Specifies a timeout for your step.
 
-    This decorator is useful
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
+    This decorator is useful if this step may hang indefinitely.
 
-    This can be used in conjunction with the `@
-
-
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
     Parameters
     ----------
-
-        Number of
-
-        Number of minutes
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...
 
 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies
-    to a step needs to be retried.
+    Specifies a timeout for your step.
 
-    This decorator is useful
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
+    This decorator is useful if this step may hang indefinitely.
 
-    This can be used in conjunction with the `@
-
-
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
     Parameters
     ----------
-
-        Number of
-
-        Number of minutes
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...
 
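The hunk above reshuffles the `@parallel`, `@environment`, `@pypi`, `@secrets`, and `@timeout` stubs without changing their signatures. A minimal sketch stacking several of them on one step, assuming a configured secrets backend; the flow, the package pins, and the `db-credentials` secret name are illustrative:

```python
from metaflow import FlowSpec, environment, pypi, secrets, step, timeout


class ETLFlow(FlowSpec):

    @timeout(hours=1, minutes=30)  # durations add up, per the docstring
    @environment(vars={"TZ": "UTC"})
    @pypi(packages={"pandas": "2.2.2"}, python="3.11.0")
    @secrets(sources=["db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        import os

        import pandas as pd  # resolved by @pypi for this step only

        assert os.environ["TZ"] == "UTC"  # injected by @environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)


if __name__ == "__main__":
    ETLFlow()
```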
@@ -820,149 +820,200 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...
 
 @typing.overload
-def
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies the
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
 
 
     Parameters
     ----------
-
-
-
-
-
-
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...
 
 @typing.overload
-def
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.
 
     ```
-    @
+    @trigger_on_finish(flow='FooFlow')
     ```
     or
     ```
-    @
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
     ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
 
-    Additionally, you can specify
-
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
     ```
-    @
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
     ```
     or
     ```
-    @
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
     ```
 
-
-
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
     ```
-    @
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
     ```
 
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
 
     Parameters
     ----------
-
-
-
-
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
 @typing.overload
-def
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.
 
     ```
-    @
+    @trigger_on_finish(flow='FooFlow')
     ```
     or
     ```
-    @
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
     ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
 
-    Additionally, you can specify
-
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
     ```
-    @
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
    ```
     or
     ```
-    @
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
     ```
 
-
-
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
     ```
-    @
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
     ```
 
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+
 
     Parameters
     ----------
-
-
-
-
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states, (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or dis-allowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        time difference with the previous execution to look at,
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence: bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
+    """
+    ...
+
 @typing.overload
 def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
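Per the relocated `@trigger_on_finish` docstring above, the decorator respects `@project` and fires when the named upstream flow completes successfully in the same namespace. A minimal sketch, assuming the flow is deployed to a supported orchestrator such as Argo Workflows; `FooFlow` is the upstream flow name used in the docstring examples:

```python
from metaflow import FlowSpec, project, step, trigger_on_finish


@project(name="my_project")
@trigger_on_finish(flow="FooFlow")  # upstream flow named in the docstring
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Once deployed (e.g. `python downstream.py argo-workflows create`),
        # this flow starts automatically after FooFlow succeeds in the same
        # project branch / namespace.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```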
@@ -1004,10 +1055,13 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...
 
-def
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    The `@
-    This decorator only works when a flow is scheduled on Airflow
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
 
 
     Parameters
@@ -1016,138 +1070,178 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
|
|
1016
1070
|
Time, in seconds before the task times out and fails. (Default: 3600)
|
1017
1071
|
poke_interval : int
|
1018
1072
|
Time in seconds that the job should wait in between each try. (Default: 60)
|
1019
|
-
mode : str
|
1020
|
-
How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
|
1021
|
-
exponential_backoff : bool
|
1022
|
-
allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
|
1023
|
-
pool : str
|
1024
|
-
the slot pool this task should run in,
|
1025
|
-
slot pools are a way to limit concurrency for certain tasks. (Default:None)
|
1026
|
-
soft_fail : bool
|
1027
|
-
Set to true to mark the task as SKIPPED on failure. (Default: False)
|
1028
|
-
name : str
|
1029
|
-
Name of the sensor on Airflow
|
1030
|
-
description : str
|
1031
|
-
Description of sensor in the Airflow UI
|
1032
|
-
|
1033
|
-
The
|
1034
|
-
|
1035
|
-
|
1036
|
-
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
1041
|
-
|
1042
|
-
|
1043
|
-
|
1044
|
-
check_existence: bool
|
1045
|
-
Set to True to check if the external task exists or check if
|
1046
|
-
the DAG to wait for exists. (Default: True)
|
1073
|
+
mode : str
|
1074
|
+
How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
|
1075
|
+
exponential_backoff : bool
|
1076
|
+
allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
|
1077
|
+
pool : str
|
1078
|
+
the slot pool this task should run in,
|
1079
|
+
slot pools are a way to limit concurrency for certain tasks. (Default:None)
|
1080
|
+
soft_fail : bool
|
1081
|
+
Set to true to mark the task as SKIPPED on failure. (Default: False)
|
1082
|
+
name : str
|
1083
|
+
Name of the sensor on Airflow
|
1084
|
+
description : str
|
1085
|
+
Description of sensor in the Airflow UI
|
1086
|
+
bucket_key : Union[str, List[str]]
|
1087
|
+
The key(s) being waited on. Supports full s3:// style url or relative path from root level.
|
1088
|
+
When it's specified as a full s3:// url, please leave `bucket_name` as None
|
1089
|
+
bucket_name : str
|
1090
|
+
Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
|
1091
|
+
When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
|
1092
|
+
wildcard_match : bool
|
1093
|
+
whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
|
1094
|
+
aws_conn_id : str
|
1095
|
+
a reference to the s3 connection on Airflow. (Default: None)
|
1096
|
+
verify : bool
|
1097
|
+
Whether or not to verify SSL certificates for S3 connection. (Default: None)
|
1047
1098
|
"""
|
1048
1099
|
...
|
1049
1100
|
|
 @typing.overload
-def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the flow(s) that this flow depends on.
+    Specifies the event(s) that this flow depends on.
 
     ```
-    @trigger_on_finish(flow='FooFlow')
+    @trigger(event='foo')
     ```
     or
     ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
 
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```
 
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
 
 
     Parameters
     ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
 @typing.overload
-def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the flow(s) that this flow depends on.
+    Specifies the event(s) that this flow depends on.
 
     ```
-    @trigger_on_finish(flow='FooFlow')
+    @trigger(event='foo')
     ```
     or
     ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
 
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```
 
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
 
 
     Parameters
     ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
+    event : Union[str, Dict[str, Any]], optional, default None
+        Event dependency for this flow.
+    events : List[Union[str, Dict[str, Any]]], default []
+        Events dependency for this flow.
     options : Dict[str, Any], default {}
         Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
 def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
     Specifies what flows belong to the same project.
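
The relocated `@trigger` stub above documents event-based triggering with payload-to-parameter mappings. As a usage illustration (not part of the diff), here is a minimal sketch of that API; the flow, event, and field names are hypothetical:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical flow: per the @trigger docstring above, 'flow_param' is
# populated from the 'event_field' key of the payload of the event 'foo'.
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class EventDrivenFlow(FlowSpec):
    flow_param = Parameter('flow_param', default='not-set')

    @step
    def start(self):
        print('flow_param =', self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    EventDrivenFlow()
```

The decorator is declarative: it only takes effect once the flow is deployed to an event-aware orchestrator such as Argo Workflows; a plain local run ignores it.
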
@@ -1234,97 +1328,3 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...
 
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-
-
-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
-    """
-    ...
-
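
The hunk above removes the old copies of the `@schedule` and `@airflow_s3_key_sensor` stubs, which this release relocates earlier in the file (see the previous hunk). As a usage illustration, here is a minimal sketch of the S3 key sensor; it assumes the decorator is importable from the top-level `metaflow` namespace (as this stub file implies) and that the documented defaults apply to any omitted arguments. Bucket, key, and flow names are hypothetical:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Hypothetical flow: hold the `start` step until the flag object appears in S3.
# Per the docstring above, the sensor is attached only when the flow is
# compiled for Airflow, e.g. `python wait_for_data_flow.py airflow create`.
@airflow_s3_key_sensor(
    bucket_key='s3://example-bucket/incoming/ready.flag',  # full s3:// url, so bucket_name stays unset
    timeout=3600,       # fail the sensor after an hour
    poke_interval=60,   # re-check every minute
)
class WaitForDataFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    WaitForDataFlow()
```

More than one sensor decorator can be stacked on the same flow, in which case the `start` step waits for all of them, as the removed docstring notes.
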