metaflow-stubs 2.12.38-py2.py3-none-any.whl → 2.12.39-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +172 -172
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +6 -6
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +3 -3
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +6 -6
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +4 -4
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +11 -11
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +3 -3
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +3 -3
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +2 -2
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +31 -31
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +4 -2
- metaflow-stubs/runner/nbrun.pyi +3 -3
- metaflow-stubs/runner/subprocess_manager.pyi +5 -2
- metaflow-stubs/runner/utils.pyi +7 -7
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +5 -5
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +6 -6
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.12.38.dist-info → metaflow_stubs-2.12.39.dist-info}/METADATA +2 -2
- metaflow_stubs-2.12.39.dist-info/RECORD +144 -0
- metaflow_stubs-2.12.38.dist-info/RECORD +0 -144
- {metaflow_stubs-2.12.38.dist-info → metaflow_stubs-2.12.39.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.12.38.dist-info → metaflow_stubs-2.12.39.dist-info}/top_level.txt +0 -0
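Since every stub file embeds the Metaflow version it was generated for (the "MF version" banner regenerated throughout this release), a quick way to confirm that the installed stubs track the installed Metaflow release is to compare the two distribution versions. A minimal sketch using only the standard library; the PyPI distribution names `metaflow` and `metaflow-stubs` are assumed:

from importlib.metadata import PackageNotFoundError, version

def stubs_in_sync() -> bool:
    # Compare the installed metaflow-stubs version against metaflow itself.
    try:
        return version("metaflow-stubs") == version("metaflow")
    except PackageNotFoundError:
        # One of the two distributions is not installed.
        return False

print("stubs in sync:", stubs_in_sync())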
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.687901 #
 ######################################################################################################

 from __future__ import annotations
@@ -34,8 +34,8 @@ from .user_configs.config_parameters import Config as Config
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import tuple_util as tuple_util
 from . import events as events
+from . import tuple_util as tuple_util
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
@@ -141,40 +141,26 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on Kubernetes.
-    """
-    ...
-
 @typing.overload
-def
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

@@ -210,90 +196,124 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Specifies the resources needed when executing this step.

-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-
+    Specifies the resources needed when executing this step.

-
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
-    and imported for all decorators types by _import_plugin_decorators().
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

 @typing.overload
-def
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-
-    and imported for all decorators types by _import_plugin_decorators().
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

 @typing.overload
-def
+def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
     """
     ...

 @typing.overload
-def
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies
-    the execution of a step.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies
-
+    Specifies environment variables to be set prior to the execution of a step.
+    """
+    ...
+
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
     """
     ...

@@ -337,77 +357,45 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def
-    ...
-
-@typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.
     """
     ...

 @typing.overload
-def
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-    """
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
     Specifies that the step will success under all circumstances.

@@ -419,21 +407,33 @@ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) ->
     ...

 @typing.overload
-def
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+    """
     ...

 @typing.overload
-def
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies
+    Specifies the PyPI packages for the step.

-
-
-
-
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

@@ -476,46 +476,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...

-@typing.overload
-def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-    """
-    ...
-
-@typing.overload
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for all steps of the flow.
-
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-    """
-    ...
-
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-    """
-    ...
-
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
-    """
-    ...
-
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -597,45 +557,75 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...

+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+    """
+    ...
+
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+    """
+    ...
+
 @typing.overload
-def
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
+    Specifies the PyPI packages for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the
+    Specifies the PyPI packages for all steps of the flow.

-    Use `@
-    steps and use `@
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
     """
     ...

 @typing.overload
-def
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the
-
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

 @typing.overload
-def
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the
-
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
     """
     ...

@@ -712,12 +702,22 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...

-
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
-
-
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...

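The hunks above largely reorder the auto-generated stub entries for Metaflow's step- and flow-level decorators. As an illustrative sketch only (the flow, step, and artifact names below are made up, not taken from the package), a flow exercising a few of the decorators whose stubs appear above might look like:

from metaflow import FlowSpec, step, resources, catch, schedule

@schedule(daily=True)
class StubSmokeTestFlow(FlowSpec):

    @resources(cpu=1, memory=4096)
    @step
    def start(self):
        self.payload = "hello"
        self.next(self.risky)

    @catch(var="failure", print_exception=True)
    @step
    def risky(self):
        # If this step raises, @catch stores the exception in self.failure
        # instead of failing the run.
        self.result = len(self.payload)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    StubSmokeTestFlow()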
metaflow-stubs/cards.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.600460 #
 ######################################################################################################

 from __future__ import annotations
metaflow-stubs/cli.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.610717 #
 ######################################################################################################

 from __future__ import annotations
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.609249 #
 ######################################################################################################

 from __future__ import annotations
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.633910 #
 ######################################################################################################

 from __future__ import annotations
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.600834 #
 ######################################################################################################

 from __future__ import annotations
metaflow-stubs/client/core.pyi
CHANGED
@@ -1,19 +1,19 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.
-# Generated on 2024-12-
+# MF version: 2.12.39 #
+# Generated on 2024-12-10T16:02:32.606315 #
 ######################################################################################################

 from __future__ import annotations

-import typing
 import metaflow
+import typing
 if typing.TYPE_CHECKING:
-    import metaflow.client.core
-    import datetime
     import tarfile
-    import metaflow.events
     import typing
+    import datetime
+    import metaflow.client.core
+    import metaflow.events

 from ..metaflow_current import current as current
 from ..events import Trigger as Trigger
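The client/core.pyi change above is just the regenerated header banner plus a reordering of the typing-only imports (tarfile, typing, datetime, metaflow.client.core, metaflow.events). For context, this is the module that types Metaflow's client API; a hedged usage sketch follows, where the flow name "HelloFlow" and the artifact name "payload" are placeholders, not taken from this package:

from metaflow import Flow, namespace

namespace(None)  # inspect runs across all namespaces
run = Flow("HelloFlow").latest_successful_run
print(run.pathspec, run.finished_at)
print(run.data.payload)  # read an artifact produced by the flow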