metaflow-stubs 2.15.6__py2.py3-none-any.whl → 2.15.7__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow-stubs/__init__.pyi +499 -499
- metaflow-stubs/cards.pyi +2 -2
- metaflow-stubs/cli.pyi +2 -2
- metaflow-stubs/cli_components/__init__.pyi +2 -2
- metaflow-stubs/cli_components/utils.pyi +2 -2
- metaflow-stubs/client/__init__.pyi +2 -2
- metaflow-stubs/client/core.pyi +7 -7
- metaflow-stubs/client/filecache.pyi +3 -3
- metaflow-stubs/events.pyi +2 -2
- metaflow-stubs/exception.pyi +2 -2
- metaflow-stubs/flowspec.pyi +5 -5
- metaflow-stubs/generated_for.txt +1 -1
- metaflow-stubs/includefile.pyi +4 -4
- metaflow-stubs/info_file.pyi +2 -2
- metaflow-stubs/metadata_provider/__init__.pyi +2 -2
- metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
- metaflow-stubs/metadata_provider/metadata.pyi +2 -2
- metaflow-stubs/metadata_provider/util.pyi +2 -2
- metaflow-stubs/metaflow_config.pyi +2 -2
- metaflow-stubs/metaflow_current.pyi +18 -18
- metaflow-stubs/multicore_utils.pyi +2 -2
- metaflow-stubs/parameters.pyi +4 -4
- metaflow-stubs/plugins/__init__.pyi +14 -14
- metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
- metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
- metaflow-stubs/plugins/airflow/exception.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
- metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
- metaflow-stubs/plugins/argo/__init__.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
- metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
- metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
- metaflow-stubs/plugins/aws/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
- metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
- metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
- metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
- metaflow-stubs/plugins/azure/__init__.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
- metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
- metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
- metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/cards/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_client.pyi +2 -2
- metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
- metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
- metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
- metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
- metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
- metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
- metaflow-stubs/plugins/cards/exception.pyi +2 -2
- metaflow-stubs/plugins/catch_decorator.pyi +3 -3
- metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/local.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
- metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
- metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
- metaflow-stubs/plugins/debug_logger.pyi +2 -2
- metaflow-stubs/plugins/debug_monitor.pyi +2 -2
- metaflow-stubs/plugins/environment_decorator.pyi +2 -2
- metaflow-stubs/plugins/events_decorator.pyi +2 -2
- metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
- metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
- metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
- metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
- metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
- metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
- metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
- metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
- metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
- metaflow-stubs/plugins/project_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
- metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
- metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
- metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
- metaflow-stubs/plugins/pypi/utils.pyi +2 -2
- metaflow-stubs/plugins/resources_decorator.pyi +2 -2
- metaflow-stubs/plugins/retry_decorator.pyi +2 -2
- metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
- metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
- metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
- metaflow-stubs/plugins/storage_executor.pyi +2 -2
- metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
- metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
- metaflow-stubs/pylint_wrapper.pyi +2 -2
- metaflow-stubs/runner/__init__.pyi +2 -2
- metaflow-stubs/runner/deployer.pyi +29 -29
- metaflow-stubs/runner/deployer_impl.pyi +2 -2
- metaflow-stubs/runner/metaflow_runner.pyi +4 -4
- metaflow-stubs/runner/nbdeploy.pyi +2 -2
- metaflow-stubs/runner/nbrun.pyi +2 -2
- metaflow-stubs/runner/subprocess_manager.pyi +2 -2
- metaflow-stubs/runner/utils.pyi +4 -4
- metaflow-stubs/system/__init__.pyi +2 -2
- metaflow-stubs/system/system_logger.pyi +2 -2
- metaflow-stubs/system/system_monitor.pyi +2 -2
- metaflow-stubs/tagging_util.pyi +2 -2
- metaflow-stubs/tuple_util.pyi +2 -2
- metaflow-stubs/user_configs/__init__.pyi +2 -2
- metaflow-stubs/user_configs/config_decorators.pyi +6 -6
- metaflow-stubs/user_configs/config_options.pyi +3 -3
- metaflow-stubs/user_configs/config_parameters.pyi +6 -6
- metaflow-stubs/version.pyi +2 -2
- {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.7.dist-info}/METADATA +3 -3
- metaflow_stubs-2.15.7.dist-info/RECORD +146 -0
- {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.7.dist-info}/WHEEL +1 -1
- metaflow_stubs-2.15.6.dist-info/RECORD +0 -146
- {metaflow_stubs-2.15.6.dist-info → metaflow_stubs-2.15.7.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
CHANGED
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.6 #
-# Generated on 2025-03-
+# MF version: 2.15.7 #
+# Generated on 2025-03-29T00:30:30.645150 #
 ######################################################################################################

 from __future__ import annotations
@@ -42,8 +42,8 @@ from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from . import cards as cards
 from . import client as client
@@ -145,6 +145,139 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  Inserts the auto-generated stubs for the @parallel decorator prototype and for the @timeout and
  @retry step decorators at this position (new lines 148-280). @timeout takes seconds, minutes, and
  hours (all default 0, added together) and surfaces a hung step as an exception that can be retried
  or caught; @retry takes times (default 3) and minutes_between_retries (default 2) for handling
  transient failures. The overload signatures and docstrings are identical to the definitions removed
  from their old positions in the hunks below; only the order of definitions in the generated file
  changes.
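For orientation, the @timeout and @retry stubs added above describe step-level decorators that are
applied on top of @step in user code. A minimal sketch of how the documented parameters combine;
the flow name and workload are illustrative, not taken from the package:

from metaflow import FlowSpec, step, retry, timeout

class FetchFlow(FlowSpec):  # hypothetical example flow

    # Retry transient failures up to 3 times, waiting 2 minutes between attempts,
    # and abort any single attempt after 30 minutes. @timeout values are summed,
    # so seconds, minutes, and hours can be mixed; a timeout surfaces as an
    # exception that @retry (and @catch, if present) can handle.
    @retry(times=3, minutes_between_retries=2)
    @timeout(minutes=30)
    @step
    def start(self):
        self.payload = "fetched"  # placeholder work
        self.next(self.end)

    @step
    def end(self):
        print(self.payload)

if __name__ == "__main__":
    FetchFlow()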
@@ -295,202 +428,112 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  Removes the old @retry, @timeout, @pypi, and @secrets definitions from this position and adds the
  @conda and @catch stubs in their place. @conda(packages={}, libraries={}, python=None,
  disabled=False) sets a step-specific Conda environment that augments any @conda_base flow-level
  settings; @catch(var=None, print_exception=True) lets a step succeed under all circumstances,
  storing any raised exception in the optional artifact named by var. The docstrings themselves are
  unchanged; the definitions are only reordered.
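The @conda and @catch stubs added in this hunk correspond to decorators that are commonly stacked
on a step. A hedged sketch, assuming a Conda-enabled Metaflow setup; the flow name and the pinned
numpy version are illustrative:

from metaflow import FlowSpec, step, conda, catch

class TrainFlow(FlowSpec):  # hypothetical example flow

    # Pin step-specific Conda packages (augmenting any @conda_base settings) and let
    # the step succeed even if it raises, stashing the exception in self.failure.
    @conda(packages={"numpy": "1.26.4"}, python="3.11")
    @catch(var="failure", print_exception=True)
    @step
    def start(self):
        import numpy as np  # resolved by the @conda environment above
        self.mean = float(np.mean([1, 2, 3]))
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "failure", None):
            print("start failed:", self.failure)
        else:
            print("mean =", self.mean)

if __name__ == "__main__":
    TrainFlow()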
@@ -527,104 +570,6 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  Removes the old @parallel and @resources definitions from this position; both reappear unchanged
  elsewhere in the file (@parallel in the first added hunk above, @resources in a later hunk below).
@@ -675,112 +620,88 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  Replaces the old @conda and @catch definitions at this position with the @pypi and @secrets stubs.
  @pypi(packages={}, python=None) sets step-specific PyPI packages that augment any @pypi_base
  flow-level settings; @secrets(sources=[]) lists the secret specs to retrieve and inject as
  environment variables before the step executes. Docstrings are unchanged; only the order differs.
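Likewise, the @pypi and @secrets stubs added here describe step-level dependency and credential
injection. A minimal sketch, assuming a configured secrets backend; the flow name, package pin,
secret spec, and environment-variable name are all illustrative:

import os

from metaflow import FlowSpec, step, pypi, secrets

class ReportFlow(FlowSpec):  # hypothetical example flow

    # Install a step-specific PyPI package and inject secrets as environment
    # variables before the step body runs. "analytics/db-credentials" is a
    # made-up secret spec for this sketch.
    @secrets(sources=["analytics/db-credentials"])
    @pypi(packages={"requests": "2.31.0"}, python="3.11")
    @step
    def start(self):
        import requests  # provided by the @pypi environment above
        token = os.environ.get("DB_TOKEN")  # variable name depends on the secret's contents
        self.has_token = bool(token)
        self.status = requests.get("https://example.com", timeout=10).status_code
        self.next(self.end)

    @step
    def end(self):
        print(self.status, self.has_token)

if __name__ == "__main__":
    ReportFlow()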
@@ -861,173 +782,176 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  Removes the old @airflow_external_task_sensor, @project, @pypi_base, and @conda_base definitions
  from this position and adds the @resources, @schedule, and @airflow_external_task_sensor stubs in
  their place. @resources(cpu=1, gpu=None, disk=None, memory=4096, shared_memory=None) declares
  requirements independently of the compute layer (run with `--with batch` or `--with kubernetes`);
  @schedule(hourly=False, daily=True, weekly=False, cron=None, timezone=None) sets when the flow
  runs on a production scheduler; @airflow_external_task_sensor attaches an Airflow
  ExternalTaskSensor before the start step when the flow is compiled with `airflow create`.
  Docstrings are unchanged; only the order differs.
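The @resources and @schedule stubs added in this hunk cover resource declarations and production
scheduling. A brief sketch of the documented parameters; the flow name, sizes, and the deployment
command mentioned in the comment are illustrative:

from metaflow import FlowSpec, step, resources, schedule

# Run the flow daily once it is deployed to a production scheduler
# (for example via `python heavy_flow.py argo-workflows create`).
@schedule(daily=True)
class HeavyFlow(FlowSpec):  # hypothetical example flow

    # Declare requirements independently of the compute layer; the same step can
    # then be sent to AWS Batch or Kubernetes with `--with batch` / `--with kubernetes`.
    @resources(cpu=4, memory=16000)
    @step
    def start(self):
        self.total = sum(range(10_000_000))
        self.next(self.end)

    @step
    def end(self):
        print(self.total)

if __name__ == "__main__":
    HeavyFlow()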
@@ -1124,46 +1048,95 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...
 
-
+@typing.overload
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    Specifies the PyPI packages for all steps of the flow.
 
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
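A minimal sketch of the flow-level usage described in the `@pypi_base` docstring above; the flow name, package names, and versions are placeholders.

```python
from metaflow import FlowSpec, pypi_base, step

# Placeholder versions; @pypi on individual steps can still override these.
@pypi_base(packages={"pandas": "2.2.1", "pyarrow": "15.0.0"}, python="3.11.5")
class PandasFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows, "rows")

if __name__ == "__main__":
    PandasFlow()
```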
+@typing.overload
+def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
+    """
+    ...
+
+@typing.overload
+def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for all steps of the flow.
+
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
 
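The `@conda_base` docstring above mirrors `@pypi_base` but resolves packages through Conda; a minimal sketch with placeholder versions follows.

```python
from metaflow import FlowSpec, conda_base, step

# Placeholder versions; step-level @conda decorators add to or override this base.
@conda_base(packages={"numpy": "1.26.4"}, python="3.10.13")
class NumpyFlow(FlowSpec):
    @step
    def start(self):
        import numpy as np  # provided by the Conda environment above
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print("total =", self.total)

if __name__ == "__main__":
    NumpyFlow()
```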
@@ -1268,54 +1241,81 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
     """
     ...
 
-
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-
-
+    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as a flow decorators. Adding more than one decorator will ensure that `start` step
+    starts only after all sensors finish.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
+    timeout : int
+        Time, in seconds before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait in between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+    pool : str
+        the slot pool this task should run in,
+        slot pools are a way to limit concurrency for certain tasks. (Default:None)
+    soft_fail : bool
+        Set to true to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of sensor in the Airflow UI
+    bucket_key : Union[str, List[str]]
+        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+        When it's specified as a full s3:// url, please leave `bucket_name` as None
+    bucket_name : str
+        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+    wildcard_match : bool
+        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+    aws_conn_id : str
+        a reference to the s3 connection on Airflow. (Default: None)
+    verify : bool
+        Whether or not to verify SSL certificates for S3 connection. (Default: None)
     """
     ...
 
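A hypothetical sketch of the S3 sensor in use; the bucket, key, and flow name are placeholders, and the remaining options are assumed to take the defaults documented above.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Placeholder bucket/key. Because bucket_key is a full s3:// URL,
# bucket_name is left unset, as the docstring above advises.
@airflow_s3_key_sensor(
    name="wait_for_input_file",
    description="Block `start` until the daily input file lands in S3",
    bucket_key="s3://example-bucket/input/daily.csv",
)
class S3TriggeredFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3TriggeredFlow()
```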
-
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies
-
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
 
 
     Parameters
     ----------
-
-
-
-
-
-
-
-
-
-
-
-
+    name : str
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+          - if `branch` is specified:
+            - if `production` is True: `prod.<branch>`
+            - if `production` is False: `test.<branch>`
+          - if `branch` is not specified:
+            - if `production` is True: `prod`
+            - if `production` is False: `user.<username>`
     """
     ...
 
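To make the branch-naming rules above concrete, here is a sketch with a hypothetical project name; the resulting namespace follows the table in the docstring (`user.<username>` by default, `test.<branch>` or `prod.<branch>` when `--branch` and `--production` are set at deploy time).

```python
from metaflow import FlowSpec, project, step

# "fraud_detection" is a hypothetical project name (lowercase alphanumerics
# and underscores only). All flows sharing this @project(name) are grouped
# under one project-specific namespace per branch.
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```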