metaflow-stubs 2.13.1__py2.py3-none-any.whl → 2.13.2__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +213 -213
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +3 -3
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +23 -23
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +3 -3
- metaflow-stubs/plugins/__init__.pyi +18 -10
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +14 -6
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +4 -4
- metaflow-stubs/user_configs/config_options.pyi +2 -2
- metaflow-stubs/user_configs/config_parameters.pyi +3 -3
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.13.1.dist-info → metaflow_stubs-2.13.2.dist-info}/METADATA +2 -2
- metaflow_stubs-2.13.2.dist-info/RECORD +144 -0
- metaflow_stubs-2.13.1.dist-info/RECORD +0 -144
- {metaflow_stubs-2.13.1.dist-info → metaflow_stubs-2.13.2.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.13.1.dist-info → metaflow_stubs-2.13.2.dist-info}/top_level.txt +0 -0
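As a hedged illustration only (the flow below is hypothetical and not part of either package version), this is the kind of user code the rewritten stubs describe to a type checker; the `@card` and `@retry` parameters mirror the signatures that appear in the `__init__.pyi` diff below.

```python
# Hypothetical flow, for illustration only; it is not part of the package diff.
# The decorator parameters mirror the stub signatures shown in the diff below.
from metaflow import FlowSpec, card, retry, step


class HelloFlow(FlowSpec):

    @card(type="default", timeout=45)           # stub: card(*, type, id, options, timeout)
    @retry(times=3, minutes_between_retries=2)  # stub: retry(*, times, minutes_between_retries)
    @step
    def start(self):
        self.message = "hello"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    HelloFlow()
```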
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.1
-# Generated on 2025-01-
+# MF version: 2.13.2 #
+# Generated on 2025-01-08T14:49:03.615477 #
 ######################################################################################################

 from __future__ import annotations
@@ -143,93 +143,157 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-    to a step needs to be retried.
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-
-
-
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+    """
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+    """
+    Creates a human-readable report, a Metaflow Card, after this step completes.

-
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+    """
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
-
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+    """
+    ...
+
+@typing.overload
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that the step will success under all circumstances.

-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+    """
+    Specifies that the step will success under all circumstances.

-
-
-
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
+    """
+    ...
+
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
     """
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies the
+    Specifies the resources needed when executing this step.

-
-
-
-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

@@ -253,41 +317,41 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...

 @typing.overload
-def
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    This
-
-
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.

-
-
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
     """
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies
-
-    This decorator is useful if this step may hang indefinitely.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    This
-
-
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.

-
-
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
     """
     ...

@@ -323,180 +387,135 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the PyPI packages for the step.

-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-
+    Specifies the PyPI packages for the step.

-
-
-
-
-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    the execution of a step.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-    """
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
+    Specifies a timeout for your step.

-
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-    """
-    ...
-
-@typing.overload
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the resources needed when executing this step.
+    This decorator is useful if this step may hang indefinitely.

-
-
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.

-
-
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies
+    Specifies a timeout for your step.

-
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    This decorator is useful if this step may hang indefinitely.

-
-
-
-
-
-
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

-
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
     """
     ...

-
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
     """
     ...

 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the PyPI packages for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the
+    Specifies the PyPI packages for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

@@ -507,15 +526,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
     """
     ...

-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
-    """
-    ...
-
 @typing.overload
 def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -589,6 +599,25 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...

+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
+    ...
+
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -671,54 +700,25 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     ...

 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
+    Specifies the Conda environment for all steps of the flow.

-    Use `@
-    steps and use `@
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-    """
-    ...
-
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

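The rewritten docstrings above spell out how `@timeout`, `@retry`, and `@catch` compose: a timeout is treated as an exception thrown by the step, `@retry` re-runs the task, and once retries are exhausted `@catch` stores the exception in the named artifact and runs a no-op task so the flow can continue. A minimal sketch of that combination, assuming a hypothetical flow and helper (`do_flaky_work` is not part of Metaflow):

```python
# Illustrative only: combines the decorators whose docstrings were updated
# in this release. Flow, step, and helper names are hypothetical.
from metaflow import FlowSpec, catch, retry, step, timeout


def do_flaky_work():
    # Placeholder for work that may fail transiently or hang.
    return 42


class RobustFlow(FlowSpec):

    # Per the docstrings above: a timeout raises an exception, @retry re-runs
    # the task, and after retries are exhausted @catch records the exception
    # in `self.problem` and lets the flow continue with a no-op task.
    @catch(var="problem", print_exception=True)
    @retry(times=3, minutes_between_retries=2)
    @timeout(minutes=5)
    @step
    def start(self):
        self.result = do_flaky_work()
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "problem", None) is not None:
            print("start failed after retries:", self.problem)


if __name__ == "__main__":
    RobustFlow()
```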