ob-metaflow-stubs 6.0.3.118__py2.py3-none-any.whl → 6.0.3.119__py2.py3-none-any.whl
- metaflow-stubs/__init__.pyi +267 -267
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/clone_util.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +126 -126
- metaflow-stubs/mf_extensions/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
- metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
- metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
- metaflow-stubs/mflog/__init__.pyi +2 -2
- metaflow-stubs/mflog/mflog.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +12 -12
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_cli.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/logs_cli.pyi +2 -2
- metaflow-stubs/plugins/package_cli.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/perimeters.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/tag_cli.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/procpoll.pyi +2 -2
- metaflow-stubs/profilers/__init__.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- {ob_metaflow_stubs-6.0.3.118.dist-info → ob_metaflow_stubs-6.0.3.119.dist-info}/METADATA +1 -1
- ob_metaflow_stubs-6.0.3.119.dist-info/RECORD +174 -0
- ob_metaflow_stubs-6.0.3.118.dist-info/RECORD +0 -174
- {ob_metaflow_stubs-6.0.3.118.dist-info → ob_metaflow_stubs-6.0.3.119.dist-info}/WHEEL +0 -0
- {ob_metaflow_stubs-6.0.3.118.dist-info → ob_metaflow_stubs-6.0.3.119.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.30.
-# Generated on 2024-11-
+# MF version: 2.12.30.2+obcheckpoint(0.1.4);ob(v1) #
+# Generated on 2024-11-21T22:12:20.897909 #
 ######################################################################################################

 from __future__ import annotations
@@ -140,90 +140,114 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+    """
+    ...
+
 @typing.overload
-def
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies a timeout for your step.

-
-
-
-
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies
+    Specifies a timeout for your step.

-
-
-
-
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-
-
-
-
-
-
-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-    and imported for all decorators types by _import_plugin_decorators().
+    Internal decorator to support Fast bakery
     """
     ...

 @typing.overload
-def
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-    and imported for all decorators types by _import_plugin_decorators().
+    Internal decorator to support Fast bakery
     """
     ...

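The hunk above documents the step-level compute decorators (`@kubernetes`, `@timeout`, `@resources`). For orientation, a minimal sketch of how they compose on a step; the flow and step names are hypothetical and the values are illustrative, with defaults taken from the stub signatures above.

```python
from metaflow import FlowSpec, kubernetes, resources, step, timeout


class HelloComputeFlow(FlowSpec):  # hypothetical flow name

    @kubernetes(cpu=2, memory=8192)   # run this step as a Kubernetes pod
    @timeout(minutes=30)              # fail the step (handled by @retry/@catch if present) when it hangs
    @step
    def start(self):
        self.x = 1
        self.next(self.end)

    @resources(cpu=1, memory=4096)    # compute-layer-agnostic requirements
    @step
    def end(self):
        print("x =", self.x)


if __name__ == "__main__":
    HelloComputeFlow()
```

Per the `@resources` docstring, the same flow can be pushed to a compute layer from the command line, e.g. `python myflow.py run --with kubernetes`.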
@@ -259,72 +283,48 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Enables loading / saving of models within a step.
     """
     ...

 @typing.overload
-def
+def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Enables loading / saving of models within a step.
     """
     ...

 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

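The `@card` docstring above says a report is produced after the step completes. A small sketch of the usual pattern (hypothetical flow name; `current.card.append` and `metaflow.cards.Markdown` are standard Metaflow APIs, not specific to this stub release):

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):  # hypothetical flow name

    @card(type='default', timeout=45)  # defaults mirror the stub signature above
    @step
    def start(self):
        # Components appended during the step are rendered into the card afterwards.
        current.card.append(Markdown("# Run summary"))
        current.card.append(Markdown("rows processed: *1234* (illustrative value)"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```

After a run, the generated card can be inspected with the flow's `card view` subcommand.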
@@ -368,154 +368,184 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-    the execution of a step.
+    Enables checkpointing for a step.
     """
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
     """
-
-    the execution of a step.
+    Enables checkpointing for a step.
     """
     ...

 @typing.overload
-def
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
-
-    Internal decorator to support Fast bakery
-    """
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

-
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Decorator that helps cache, version and store models/datasets from huggingface hub.
     """
     ...

 @typing.overload
-def
-
-
-
-
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
     ...

-
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-
-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-
+    Specifies environment variables to be set prior to the execution of a step.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+    """
+    ...
+
+@typing.overload
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the PyPI packages for all steps of the flow.

-
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+    """
+    ...
+
+@typing.overload
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

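This hunk documents the dependency- and environment-related step decorators (`@pypi`/`@pypi_base`, `@conda`, `@environment`, `@secrets`). A sketch of how the flow-level base decorator and a step-level override combine, as described in the docstrings; the flow name, secret source, and package versions are hypothetical/illustrative.

```python
from metaflow import FlowSpec, environment, pypi, pypi_base, secrets, step


@pypi_base(python='3.11', packages={'pandas': '2.2.2'})  # packages shared by every step
class DependencyDemoFlow(FlowSpec):  # hypothetical flow name

    @environment(vars={'TOKENIZERS_PARALLELISM': 'false'})  # plain environment variables
    @secrets(sources=['my-db-credentials'])                 # hypothetical secret source, injected as env vars
    @step
    def start(self):
        import os
        import pandas as pd
        print(pd.__version__, os.environ.get('TOKENIZERS_PARALLELISM'))
        self.next(self.train)

    @pypi(packages={'scikit-learn': '1.5.0'})  # step-specific addition on top of @pypi_base
    @step
    def train(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DependencyDemoFlow()
```

`@pypi`/`@pypi_base` take effect when the flow is run with a package-aware environment, typically `--environment=pypi` on the command line.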
@@ -543,84 +573,109 @@ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[Fl
     """
     ...

-
-def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-
-
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
     """
     ...

 @typing.overload
-def
-    ...
-
-def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the event(s) that this flow depends on.

-
-
-
-
-
-    @
-
-
-
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
     """
     ...

 @typing.overload
-def
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-    """
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies
+    Specifies the event(s) that this flow depends on.

-
-
-
-
-
-
-
-
-
+    ```
+    @trigger(event='foo')
+    ```
+    or
+    ```
+    @trigger(events=['foo', 'bar'])
+    ```
+
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    ```
+    or
+    ```
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+                     {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    ```
+
+    'parameters' can also be a list of strings and tuples like so:
+    ```
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
     """
     ...

 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

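The flow-level decorators in this hunk (`@trigger`, `@conda_base`, the Airflow sensors) attach deployment-time behavior to the whole flow. A sketch combining an event trigger with a shared Conda environment, following the docstring's parameter-mapping form; the event name, event field, and package versions are hypothetical.

```python
from metaflow import FlowSpec, Parameter, conda_base, step, trigger


@trigger(event={'name': 'data_updated',                  # hypothetical event name
                'parameters': {'table': 'table_name'}})  # map an event field onto the 'table' parameter
@conda_base(python='3.11', libraries={'pandas': '2.2.2'})  # one Conda environment for all steps
class EventDrivenFlow(FlowSpec):  # hypothetical flow name

    table = Parameter('table', default='demo')

    @step
    def start(self):
        print('refreshing', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```

`@trigger` only takes effect once the flow is deployed to an event-capable scheduler (for example via `argo-workflows create`); local `run`s ignore it.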
@@ -705,86 +760,31 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...

-
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...

 @typing.overload
-def
-    """
-    Specifies the event(s) that this flow depends on.
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
-
-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
-    """
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-
-
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
     ...

-def
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
-    ```
-    @trigger(event='foo')
-    ```
-    or
-    ```
-    @trigger(events=['foo', 'bar'])
-    ```
-
-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
-    ```
-    or
-    ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
-    ```
+    Specifies what flows belong to the same project.

-
-
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
-    ```
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
     """
     ...

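`@project` and `@schedule` are likewise flow-level. A short sketch of the combination described in the docstrings above (project and flow names are hypothetical); `daily=True` is the stub's default and is written out only for clarity.

```python
from metaflow import FlowSpec, project, schedule, step


@project(name='demo_reports')  # hypothetical project name; flows sharing it get a common namespace
@schedule(daily=True)          # honored by the production scheduler the flow is deployed to
class NightlyReportFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        print('report generated')


if __name__ == "__main__":
    NightlyReportFlow()
```

As with `@trigger`, the schedule is applied when the flow is deployed to a production orchestrator (Argo Workflows, Step Functions, or Airflow), not on local runs.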