metaflow-stubs 2.13.3__py2.py3-none-any.whl → 2.13.4__py2.py3-none-any.whl
This diff compares two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +255 -255
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +5 -5
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +5 -5
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +6 -6
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +5 -5
- metaflow-stubs/plugins/__init__.pyi +11 -11
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/aws/__init__.pyi +3 -3
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +4 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +4 -4
- metaflow-stubs/runner/deployer_impl.pyi +3 -3
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +2 -2
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +3 -3
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +5 -5
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +8 -8
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.4.dist-info}/METADATA +2 -2
- metaflow_stubs-2.13.4.dist-info/RECORD +144 -0
- metaflow_stubs-2.13.3.dist-info/RECORD +0 -144
- {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.4.dist-info}/WHEEL +0 -0
- {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.4.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.3
-# Generated on 2025-01-
+# MF version: 2.13.4 #
+# Generated on 2025-01-15T17:53:58.657195 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import typing
     import datetime
+    import typing
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import tuple_util as tuple_util
 from . import events as events
+from . import tuple_util as tuple_util
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,170 +143,114 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
[Reordered auto-generated stubs: this hunk now emits the `@parallel`, `@card`, `@secrets`, `@retry`, and `@kubernetes` step-decorator overloads and docstrings in this range; the `@secrets`, `@resources`, `@pypi`, `@environment`, `@parallel`, and `@card` definitions that previously occupied these lines reappear elsewhere in the file.]
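For orientation, below is a minimal sketch of how the step decorators stubbed in this hunk are used in a flow. The flow name, step bodies, retry settings, resource values, and the secret source name are illustrative and not taken from the package.

```
# Hypothetical flow; names and values below are illustrative.
from metaflow import FlowSpec, step, card, retry, resources, secrets

class ExampleFlow(FlowSpec):

    @card(type="default", timeout=45)              # render a Metaflow Card after the step completes
    @retry(times=3, minutes_between_retries=2)     # re-run the task on transient failures
    @resources(cpu=1, memory=4096)                 # resource hints, independent of the compute layer
    @secrets(sources=["example-credentials"])      # inject the named secret as environment variables
    @step
    def start(self):
        self.value = 1
        self.next(self.end)

    @step
    def end(self):
        print(self.value)

if __name__ == "__main__":
    ExampleFlow()
```

Assuming the sketch is saved as `example_flow.py`, running `python example_flow.py run --with batch` (or `--with kubernetes`) applies the `@resources` requirements on that compute layer, as the `@resources` docstring in this stub file describes.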
|
@@ -350,38 +294,26 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
[The `@catch` step-decorator overloads and docstring are now emitted in this range; the `@conda` implementation and other definitions that previously occupied these lines move elsewhere in the file.]
@@ -393,129 +325,237 @@ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) ->
[The bulk of the reordering: the `@batch`, `@pypi`, `@environment`, `@resources`, `@conda`, `@trigger`, and `@project` stubs, with their overloads and docstrings, are now emitted in this range; the `@catch`, `@retry`, `@conda`, and `@trigger` definitions that previously occupied these lines appear at other positions in the file.]
|
     """
     ...

+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
+    """
+    ...
+
 @typing.overload
 def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
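The sketch below shows how the `@airflow_s3_key_sensor` flow decorator stubbed in this hunk might be attached; every argument value (timeouts, pool, connection id, bucket, and key) is illustrative. Per the docstring above, the sensor gates the `start` step when the flow is compiled for Airflow with `airflow create`.

```
# Hypothetical usage; all argument values are illustrative.
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,                 # seconds before the sensor task times out
    poke_interval=60,             # seconds between S3 checks
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_input_file",
    description="Wait for the nightly input file to land in S3",
    bucket_key="incoming/input.csv",
    bucket_name="example-bucket",
    wildcard_match=False,
    aws_conn_id="aws_default",
    verify=True,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```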
@@ -630,95 +680,45 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
[The `@conda_base` and `@schedule` flow-decorator stubs are now emitted in this range; the `@airflow_s3_key_sensor` tail and the `@trigger` overloads and docstrings that previously occupied these lines are re-emitted earlier in the file (see the preceding hunks).]
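To close, a minimal sketch of the flow-level decorators whose stubs move in this final hunk; the schedule, Python version, and package pin are illustrative and not taken from the package.

```
# Hypothetical flow using the flow-level decorators stubbed above; values are illustrative.
from metaflow import FlowSpec, conda_base, schedule, step

@schedule(daily=True)                                     # run once a day on the production scheduler
@conda_base(python="3.11", packages={"pandas": "2.2.2"})  # shared Conda environment for every step
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd
        self.row_count = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.row_count)

if __name__ == "__main__":
    NightlyReportFlow()
```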