metaflow-stubs 2.13.8-py2.py3-none-any.whl → 2.13.9-py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
- metaflow-stubs/__init__.pyi +270 -270
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +4 -4
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +6 -6
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +23 -23
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +14 -14
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +6 -6
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +2 -2
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +6 -6
- metaflow-stubs/user_configs/config_options.pyi +2 -2
- metaflow-stubs/user_configs/config_parameters.pyi +6 -6
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.13.8.dist-info → metaflow_stubs-2.13.9.dist-info}/METADATA +2 -2
- metaflow_stubs-2.13.9.dist-info/RECORD +145 -0
- metaflow_stubs-2.13.8.dist-info/RECORD +0 -145
- {metaflow_stubs-2.13.8.dist-info → metaflow_stubs-2.13.9.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.13.8.dist-info → metaflow_stubs-2.13.9.dist-info}/top_level.txt +0 -0
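Illustrative aside (not part of the published diff): since only stub files and packaging metadata changed, a quick local check of the installed releases can confirm that the stubs match the metaflow runtime they were generated for. The distribution names below are the real published package names; the snippet itself is a hypothetical sketch.

```
# Hedged sketch: print the installed metaflow and metaflow-stubs versions.
# The stub package is regenerated for each metaflow release, so the two
# versions are expected to move together (e.g. both at 2.13.9 after upgrading).
from importlib.metadata import PackageNotFoundError, version

for dist in ("metaflow", "metaflow-stubs"):
    try:
        print(dist, version(dist))
    except PackageNotFoundError:
        print(dist, "is not installed")
```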
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.8 #
-# Generated on 2025-01-
+# MF version: 2.13.9 #
+# Generated on 2025-01-31T17:23:41.630174 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import typing
     import datetime
+    import typing
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import events as events
 from . import tuple_util as tuple_util
+from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,228 +143,171 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...
 
 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies a timeout for your step.
 
-
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    This decorator is useful if this step may hang indefinitely.
 
-
-
-
-
-
-
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...
 
 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies
+    Specifies a timeout for your step.
 
-
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    This decorator is useful if this step may hang indefinitely.
 
-
-
-
-
-
-
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...
 
-
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies that this step should execute on Kubernetes.
     """
     ...
 
 @typing.overload
-def
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    the execution of a step.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...
 
 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies
-    the execution of a step.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...
 
 @typing.overload
-def
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the PyPI packages for the step.
 
-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-
+    Specifies the PyPI packages for the step.
 
-
-
-
-
-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
-    """
-    ...
-
-@typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-    """
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies the resources needed when executing this step.
 
-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...
 
 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies
+    Specifies the resources needed when executing this step.
 
-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...
 
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...
 
@@ -408,41 +351,27 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...
 
 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-    Note that
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+    Creates a human-readable report, a Metaflow Card, after this step completes.
 
-    Note that
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...
 
@@ -478,83 +407,83 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...
 
 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...
 
 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
-
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    Specifies environment variables to be set prior to the execution of a step.
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+    """
+    Specifies that the step will success under all circumstances.
 
-
-
-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
     """
     ...
 
@@ -632,54 +561,141 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     ...
 
 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the
-
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...
 
-def
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
-
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
     """
     ...
 
 @typing.overload
-def
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.
 
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
     """
     ...
 
 @typing.overload
-def
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the
+    Specifies the flow(s) that this flow depends on.
 
-
-
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully
+
+    Additionally, you can specify project aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
+    """
+    ...
+
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...
 
@@ -706,19 +722,3 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...
 
-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-    """
-    ...
-
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-    """
-    ...
-
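The hunks above mostly reorder the auto-generated decorator stubs and refresh their docstrings; the step- and flow-level API they describe is the same in 2.13.8 and 2.13.9. As a rough usage sketch grounded in those docstrings (the flow, step, and artifact names are invented, and the argument values are arbitrary examples rather than anything taken from the diff):

```
# Hypothetical flow exercising decorators whose stubs appear in this diff.
from metaflow import FlowSpec, card, catch, resources, step, timeout


class ExampleFlow(FlowSpec):

    @card(type="default")             # human-readable report after the step completes
    @catch(var="compute_error")       # on failure, store the exception in self.compute_error
    @timeout(minutes=30)              # seconds/minutes/hours are added together
    @resources(cpu=1, memory=4096)    # honored by whichever compute layer runs the step
    @step
    def start(self):
        self.result = 42
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    ExampleFlow()
```

Per the `@resources` docstring above, the same flow can be pushed to a compute layer from the command line, e.g. `python exampleflow.py run --with batch` or `--with kubernetes`.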