metaflow-stubs 2.15.9__py2.py3-none-any.whl → 2.15.11__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects only the changes between the two published versions.
- metaflow-stubs/__init__.pyi +502 -502
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +4 -4
- metaflow-stubs/client/filecache.pyi +2 -2
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +4 -4
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +2 -2
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +17 -17
- metaflow-stubs/metaflow_git.pyi +2 -2
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +2 -2
- metaflow-stubs/plugins/__init__.pyi +11 -11
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -3
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/plugins/uv/__init__.pyi +2 -2
- metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +5 -5
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +3 -3
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +3 -3
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +4 -4
- metaflow-stubs/user_configs/config_options.pyi +2 -2
- metaflow-stubs/user_configs/config_parameters.pyi +5 -5
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.11.dist-info}/METADATA +2 -2
- metaflow_stubs-2.15.11.dist-info/RECORD +149 -0
- {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.11.dist-info}/WHEEL +1 -1
- metaflow_stubs-2.15.9.dist-info/RECORD +0 -149
- {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.11.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.9
- # Generated on 2025-…
+ # MF version: 2.15.11 #
+ # Generated on 2025-05-06T23:47:09.203033 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
@@ -44,8 +44,8 @@ from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -146,6 +146,145 @@ def step(f: ...)
Adds, immediately after `step`, the `@typing.overload` stubs and implementation signatures — each with its docstring — for three step decorators that the 2.15.11 generator now emits earlier in the file (the old copies of `retry` and `environment` are removed further down in this diff). A usage sketch follows below.
- `catch(*, var: Optional[str] = None, print_exception: bool = True)`: makes the step succeed under all circumstances; the caught exception is stored in the optional artifact named by `var`, which can be used to detect that the step's happy-path artifacts are missing.
- `environment(*, vars: Dict[str, str] = {})`: environment variables to set prior to the execution of the step.
- `retry(*, times: int = 3, minutes_between_retries: int = 2)`: retries the task on transient errors such as networking issues; combined with `@catch`, a no-op task runs once retries are exhausted so the flow can continue. Tasks with unsafe-to-retry operations (e.g. database updates) should use `@retry(times=0)`.
The surrounding context (`def kubernetes(...)` and its docstring, "Specifies that this step should execute on Kubernetes.") is unchanged.
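The interplay described by these docstrings — `@retry` for transient failures, with `@catch` recording the exception once retries run out — is easiest to see in a small flow. A minimal sketch, with flow, step, and artifact names invented for illustration (they are not taken from the package):

```
from metaflow import FlowSpec, step, retry, catch


class ResilientFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=0)        # retry transient failures
    @catch(var='compute_error', print_exception=True)  # after retries, store the exception
    @step
    def start(self):
        # Stand-in for work that can fail transiently (e.g. a network call).
        self.result = sum(range(10))
        self.next(self.end)

    @step
    def end(self):
        # Detect whether the happy path produced its artifacts.
        if getattr(self, 'compute_error', None) is not None:
            print('start step failed:', self.compute_error)
        else:
            print('result =', self.result)


if __name__ == '__main__':
    ResilientFlow()
```

Per the docstring, `compute_error` only carries an exception when the step ultimately failed, so downstream steps can branch on it as shown.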
@@ -232,70 +371,53 @@ def kubernetes(...)
Replaces the stubs that used to follow `kubernetes` — apparently the old `card` overloads (their docstring is elided in this rendering except for the `timeout : int, default 45` parameter) and the old `parallel` prototype stubs — with the `@typing.overload` stubs and implementation signature for the step decorator `pypi(*, packages: Dict[str, str] = {}, python: Optional[str] = None)`. Its docstring notes that the decorator augments attributes set by the flow-level `@pypi_base` decorator, that `packages` maps package names to versions, and that `python=None` means the interpreter version used to start the run. A usage sketch follows below.
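As a sketch of how the `@pypi` stub added here is used — the package names, versions, and Python version below are placeholders, and the step-level decorator overrides whatever the flow-level `@pypi_base` provides:

```
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(python='3.10.11', packages={'requests': '2.31.0'})  # shared across all steps
class PypiFlow(FlowSpec):

    @pypi(packages={'pandas': '2.2.2'})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd  # resolved inside the step's isolated environment
        self.n_rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.n_rows)


if __name__ == '__main__':
    PypiFlow()
```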
@@ -449,149 +571,70 @@ def batch(f: ...)
Rewrites the block that follows `batch`: the old `timeout`, `retry`, and `environment` step-decorator stubs are removed from this position, and the following stubs are emitted in their place (a card usage sketch follows below):
- `card(*, type: str = 'default', id: Optional[str] = None, options: Dict[str, Any] = {}, timeout: int = 45)`: creates a human-readable report, a Metaflow Card, after the step completes; multiple `@card` decorators with different parameters may be attached to one step, `options` are passed to the card type, and `timeout` interrupts reporting after that many seconds.
- `parallel`: the decorator prototype that gets specialized and imported for all decorator types by `_import_plugin_decorators()`.
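A brief sketch of attaching the `@card` decorator described above. The `Markdown` component comes from `metaflow.cards`, and the card content is illustrative:

```
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown


class CardFlow(FlowSpec):

    @card(type='default', timeout=45)
    @step
    def start(self):
        # Components appended to current.card are rendered into the report
        # generated after this step completes.
        current.card.append(Markdown('# Run summary\nEverything is fine.'))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    CardFlow()
```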
@@ -674,92 +717,6 @@ def resources(f: ...)
Pure removal: the old copies of the `pypi` step-decorator stubs (`packages`, `python`) and the `secrets` step-decorator stubs (`sources: List[Union[str, Dict[str, Any]]] = []`, a list of secret specs defining how secrets are retrieved and injected as environment variables) are deleted from this position; both are emitted elsewhere in the regenerated file. The following `conda` stubs are unchanged.
@@ -820,172 +777,248 @@ def conda(f: ...)
The largest hunk: after `conda`, the old copies of several flow-level decorator stubs are removed (among them `project`, `airflow_external_task_sensor`, and `pypi_base`; the docstring of the first removed block is elided in this rendering), and the following step- and flow-level stubs are emitted here instead (a usage sketch follows below):
- `timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0)`: a step timeout, useful when a step may hang indefinitely. It combines with `@retry` and `@catch` — a timeout is treated as an exception thrown by the step — and the parameter values are added together, so 60 seconds plus 1 hour yields an effective timeout of 1 hour and 1 minute.
- `secrets(*, sources: List[Union[str, Dict[str, Any]]] = [])`: secrets to retrieve and inject as environment variables before the step executes.
- `schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: Optional[str] = None, timezone: Optional[str] = None)`: when the flow should run on a production scheduler; `cron` takes a custom Cron expression and `timezone` (IANA format) is currently supported only for Argo Workflows.
- `trigger_on_finish(*, flow: Union[str, Dict[str, str], None] = None, flows: List[Union[str, Dict[str, str]]] = [], options: Dict[str, Any] = {})`: declares the upstream flow(s) this flow depends on, e.g. `@trigger_on_finish(flow='FooFlow')` or `@trigger_on_finish(flows=['FooFlow', 'BarFlow'])`. It respects the `@project` decorator and triggers when upstream runs in the same namespace complete successfully; fully qualified names such as `my_project.branch.my_branch.FooFlow` are accepted, a dict form like `{"name": "FooFlow", "project": "my_project", "project_branch": "branch"}` infers the rest from the current project, and `branch` is typically one of `prod`, `user.bob`, `test.my_experiment`, or `prod.staging`.
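The decorators introduced in this hunk compose naturally: `@trigger_on_finish` at the flow level, `@timeout` and `@secrets` at the step level. A hedged sketch — the upstream flow name and secret source are made up, and the time budget mirrors the additive example in the docstring:

```
import os

from metaflow import FlowSpec, step, timeout, secrets, trigger_on_finish


@trigger_on_finish(flow='UpstreamFlow')  # run after UpstreamFlow succeeds (for deployed flows)
class DownstreamFlow(FlowSpec):

    @timeout(hours=1, minutes=1)           # budgets are additive: effectively 61 minutes
    @secrets(sources=['db-credentials'])   # hypothetical secret spec, injected as env vars
    @step
    def start(self):
        # The secret's keys appear as environment variables inside the step.
        print('db user configured:', 'DB_USER' in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    DownstreamFlow()
```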
@@ -1040,6 +1073,49 @@ def conda_base(f: ...)
Adds the flow-level decorator stub `airflow_external_task_sensor(*, timeout, poke_interval, mode, exponential_backoff, pool, soft_fail, name, description, external_dag_id, external_task_ids, allowed_states, failed_states, execution_delta, check_existence)`. Per its docstring, it attaches an Airflow ExternalTaskSensor before the `start` step, works only when the flow is scheduled on Airflow and compiled with `airflow create`, and more than one sensor may be added, in which case `start` begins only after all sensors finish. Documented defaults include `timeout=3600` seconds, `poke_interval=60` seconds, `mode="poke"`, `allowed_states=['success']`, and `check_existence=True`. A usage sketch follows below. The surrounding `airflow_s3_key_sensor` stub is unchanged.
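A sketch of the flow-level sensor added here, for a flow compiled with `airflow create`. The DAG and task ids are placeholders, and the parameters not shown are assumed to fall back to the defaults quoted in the docstring (the stub itself lists them without defaults):

```
from metaflow import FlowSpec, step, airflow_external_task_sensor


@airflow_external_task_sensor(
    name='wait_for_nightly_etl',          # sensor name shown in the Airflow UI
    description='Block start until the ETL DAG finishes',
    external_dag_id='nightly_etl',        # hypothetical upstream DAG
    external_task_ids=['load_table'],     # or None to wait for the whole DAG
    timeout=3600,
    poke_interval=60,
)
class AfterEtlFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    AfterEtlFlow()
```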
@@ -1083,6 +1159,82 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
|
|
1083
1159
|
"""
|
1084
1160
|
...
|
1085
1161
|
|
+@typing.overload
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
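The two `@typing.overload` stubs plus the final `pypi_base` signature are the standard pattern for a decorator usable both bare (`@pypi_base`) and with arguments. A minimal usage sketch, with the package name and versions chosen only for illustration:

```python
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")  # versions illustrative
class PandasFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.df = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        print(self.df.describe())


if __name__ == "__main__":
    PandasFlow()
```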
+def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
+
+
+    Parameters
+    ----------
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+            - if `branch` is specified:
+                - if `production` is True: `prod.<branch>`
+                - if `production` is False: `test.<branch>`
+            - if `branch` is not specified:
+                - if `production` is True: `prod`
+                - if `production` is False: `user.<username>`
+    """
+    ...
+
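Since most of `project`'s behaviour is the branch-naming rule spelled out above, a small sketch may help; the project name and username below are placeholders:

```python
from metaflow import FlowSpec, project, step

# Per the docstring above: with no `branch` and production=False this deploys under
# `user.<username>`; `--production` alone gives `prod`; `--branch eu` gives `test.eu`
# (or `prod.eu` when combined with `--production`).
@project(name="demand_forecast")
class ForecastFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ForecastFlow()
```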
 @typing.overload
 def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -1176,155 +1328,3 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...

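The `trigger` docstring body is skipped by this hunk, but the signature above shows the `event`, `events`, and `options` keywords. A minimal, assumed usage sketch for an event-triggered deployment follows; the event name is hypothetical:

```python
from metaflow import FlowSpec, step, trigger

# Hypothetical event name; the decorator takes effect when the flow is deployed
# to a production orchestrator such as Argo Workflows.
@trigger(event="raw_data_updated")
class EventDrivenFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```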
-@typing.overload
-def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully
-
-    Additionally, you can specify project aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
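The removed docstring notes that `@trigger_on_finish` respects `@project`; a short sketch of that combination, with the project and flow names invented:

```python
from metaflow import FlowSpec, project, step, trigger_on_finish

# Both flows are assumed to carry @project(name="analytics"), so the trigger fires
# when an IngestFlow run in the same project/branch namespace finishes successfully.
@project(name="analytics")
@trigger_on_finish(flow="IngestFlow")
class ReportFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```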
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
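For completeness, the removed `@schedule` overloads correspond to usage like the following; the cron expression and timezone are illustrative, and per the docstring the timezone is honoured only on Argo Workflows:

```python
from metaflow import FlowSpec, schedule, step

# Values illustrative; the exact cron syntax accepted depends on the target scheduler.
@schedule(cron="0 6 * * *", timezone="Europe/London")
class MorningFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```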