metaflow-stubs 2.13.4__py2.py3-none-any.whl → 2.13.6__py2.py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +208 -208
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +23 -23
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +12 -12
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +4 -4
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +3 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +20 -0
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +5 -5
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +6 -6
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/METADATA +2 -2
- metaflow_stubs-2.13.6.dist-info/RECORD +145 -0
- metaflow_stubs-2.13.4.dist-info/RECORD +0 -144
- {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.
-# Generated on 2025-01-
+# MF version: 2.13.6 #
+# Generated on 2025-01-23T12:09:53.031777 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import events as events
 from . import tuple_util as tuple_util
+from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,69 +143,62 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies a timeout for your step.

-
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-
+    Specifies a timeout for your step.

-
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    the execution of a step.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies
-    the execution of a step.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

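For orientation, here is a minimal sketch of how the `@timeout` and `@batch` step decorators whose stubs change above are typically applied. The flow name, step names, and parameter values are illustrative (not from the diff), and executing the `@batch` step requires a configured AWS Batch queue; the timeout values mirror the "60 seconds and 1 hour" example in the docstring.

```python
from metaflow import FlowSpec, batch, step, timeout


class TimeoutBatchFlow(FlowSpec):
    """Illustrative flow; names and resource values below are made up."""

    @batch(cpu=2, memory=8192)      # run this step as an AWS Batch job
    @timeout(seconds=60, hours=1)   # values add up: effective timeout is 1 hour and 1 minute
    @step
    def start(self):
        self.answer = 42
        self.next(self.end)

    @step
    def end(self):
        print("answer:", self.answer)


if __name__ == "__main__":
    TimeoutBatchFlow()
```

Saved as a flow file, this would be launched with `python timeout_batch_flow.py run` against whatever Batch configuration the Metaflow deployment provides.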
@@ -248,100 +241,88 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
-    """
-    ...
-
 @typing.overload
-def
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Specifies the Conda environment for the step.

-
-
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Specifies the Conda environment for the step.

-
-
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+    """
+    ...
+
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
     """
     ...

 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

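The hunk above re-emits the stubs for `@conda`, `@kubernetes`, `@secrets`, and `@card`. As a hedged sketch of how these step decorators are combined in practice: the flow below is illustrative only; the package pins, resource sizes, the secret source `my-secret`, and the environment variable `MY_API_KEY` are assumptions, and running it needs `--environment=conda` plus a configured Kubernetes cluster and secrets backend.

```python
from metaflow import FlowSpec, card, conda, kubernetes, secrets, step


class EnvAndCardFlow(FlowSpec):
    """Illustrative flow; pins, resources, and secret names below are made up."""

    @card(type="default", timeout=45)                      # attach a Metaflow card to this step
    @conda(packages={"pandas": "2.2.3"}, python="3.11.*")  # step-level Conda environment
    @step
    def start(self):
        import pandas as pd  # importable because @conda provides it in this step's environment
        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.train)

    @secrets(sources=["my-secret"])   # hypothetical secret, injected as environment variables
    @kubernetes(cpu=2, memory=8192)   # execute this step on Kubernetes
    @step
    def train(self):
        import os
        self.has_secret = "MY_API_KEY" in os.environ  # hypothetical key exposed by the secret
        self.next(self.end)

    @step
    def end(self):
        print(self.n_rows, self.has_secret)


if __name__ == "__main__":
    EnvAndCardFlow()
```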
@@ -376,6 +357,37 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+    """
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
 @typing.overload
 def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
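The hunk above adds the `@catch(var=..., print_exception=...)` stub at its new position. A minimal sketch of the pattern its docstring describes (an always-failing step kept illustrative on purpose; flow, step, and artifact names are not from the diff), combining it with `@retry` as the docstrings suggest:

```python
from metaflow import FlowSpec, catch, retry, step


class CatchFlow(FlowSpec):
    """Illustrative flow; the division by zero is a deliberate failure."""

    @catch(var="failure", print_exception=True)  # store the raised exception in self.failure
    @retry(times=1)                              # retries run first; @catch captures the final failure
    @step
    def start(self):
        self.value = 1 / 0  # raises ZeroDivisionError; the run continues thanks to @catch
        self.next(self.end)

    @step
    def end(self):
        # Per the docstring above, `failure` is an optional artifact holding the exception;
        # happy-path artifacts (here, `value`) may be missing when it is set.
        if self.failure:
            print("start step failed:", self.failure)


if __name__ == "__main__":
    CatchFlow()
```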
@@ -447,33 +459,102 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
     ...

 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
     ...

 @typing.overload
-def
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+    """
     ...

-
+@typing.overload
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.

-
-
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
     """
     ...

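The `@trigger_on_finish` flow-level decorator whose stub moves in the hunk above is applied to the flow class rather than a step. A hedged sketch, reusing the `FooFlow` upstream name from the docstring examples; the downstream flow name is made up, and the trigger only takes effect once the flow is deployed to a production orchestrator rather than run locally.

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # start this flow when a run of FooFlow completes successfully
class DownstreamFlow(FlowSpec):
    """Illustrative flow; 'FooFlow' is the upstream flow from the docstring example above."""

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        print("ran after FooFlow finished")


if __name__ == "__main__":
    DownstreamFlow()
```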
@@ -550,110 +631,22 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...

-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-    """
-    ...
-
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-    """
-    ...
-
 @typing.overload
-def
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...

 @typing.overload
-def
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
     """
-    Specifies the
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...

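For the `@schedule` flow-level decorator whose stub is rewritten above, a minimal sketch of typical use; the flow name is invented, and per the docstring the schedule only applies once the flow is deployed to a production scheduler (the `cron` and `timezone` parameters from the signature can be used instead of the boolean flags).

```python
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)  # run once a day when deployed to a production scheduler
class NightlyFlow(FlowSpec):
    """Illustrative flow; does nothing beyond demonstrating the decorator."""

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```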
@@ -703,22 +696,29 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...

-
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
     """
     ...

-
-
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+    """
     ...

-def
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
     """
     ...

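The final hunk above relocates the `@project` flow-level decorator stub alongside the Airflow sensor decorators. A hedged sketch of `@project` in use; the flow body is trivial on purpose, and `my_project` simply reuses the project name appearing in the `@trigger_on_finish` docstring examples earlier in this diff.

```python
from metaflow import FlowSpec, project, step


@project(name="my_project")  # flows sharing this name get a common, project-specific namespace
class ProjectFlow(FlowSpec):
    """Illustrative flow; 'my_project' is borrowed from the docstring examples above."""

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectFlow()
```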
metaflow-stubs/cards.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.
-# Generated on 2025-01-
+# MF version: 2.13.6 #
+# Generated on 2025-01-23T12:09:52.952052 #
 ######################################################################################################

 from __future__ import annotations
metaflow-stubs/cli.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.
-# Generated on 2025-01-
+# MF version: 2.13.6 #
+# Generated on 2025-01-23T12:09:52.961230 #
 ######################################################################################################

 from __future__ import annotations
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.
-# Generated on 2025-01-
+# MF version: 2.13.6 #
+# Generated on 2025-01-23T12:09:52.959924 #
 ######################################################################################################

 from __future__ import annotations