metaflow-stubs 2.17.5__py2.py3-none-any.whl → 2.18.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic. Click here for more details.

Files changed (166) hide show
  1. metaflow-stubs/__init__.pyi +462 -456
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +6 -6
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  29. metaflow-stubs/parameters.pyi +2 -2
  30. metaflow-stubs/plugins/__init__.pyi +10 -10
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +5 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +8 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +32 -32
  141. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +2 -2
  154. metaflow-stubs/user_configs/config_parameters.pyi +8 -6
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  158. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.1.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.1.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.17.5.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.1.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.17.5.dist-info → metaflow_stubs-2.18.1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.17.5 #
4
- # Generated on 2025-08-25T21:01:36.499633 #
3
+ # MF version: 2.18.1 #
4
+ # Generated on 2025-08-29T13:35:56.240161 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -39,16 +39,16 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
+ from . import events as events
42
43
  from . import tuple_util as tuple_util
43
44
  from . import metaflow_git as metaflow_git
44
- from . import events as events
45
45
  from . import runner as runner
46
46
  from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
51
50
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
51
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
52
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
53
  from . import cards as cards
54
54
  from . import client as client
@@ -153,7 +153,7 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
153
153
  ...
154
154
 
155
155
  @typing.overload
156
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
156
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
157
157
  """
158
158
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
159
159
 
@@ -194,6 +194,9 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
194
194
  A swappiness value of 0 causes swapping not to happen unless absolutely
195
195
  necessary. A swappiness value of 100 causes pages to be swapped very
196
196
  aggressively. Accepted values are whole numbers between 0 and 100.
197
+ aws_batch_tags: Dict[str, str], optional, default None
198
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
199
+ Set as string key-value pairs.
197
200
  use_tmpfs : bool, default False
198
201
  This enables an explicit tmpfs mount for this step. Note that tmpfs is
199
202
  not available on Fargate compute environments
@@ -231,7 +234,7 @@ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Calla
231
234
  def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
232
235
  ...
233
236
 
234
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
237
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
235
238
  """
236
239
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
237
240
 
@@ -272,6 +275,9 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
272
275
  A swappiness value of 0 causes swapping not to happen unless absolutely
273
276
  necessary. A swappiness value of 100 causes pages to be swapped very
274
277
  aggressively. Accepted values are whole numbers between 0 and 100.
278
+ aws_batch_tags: Dict[str, str], optional, default None
279
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
280
+ Set as string key-value pairs.
275
281
  use_tmpfs : bool, default False
276
282
  This enables an explicit tmpfs mount for this step. Note that tmpfs is
277
283
  not available on Fargate compute environments
@@ -302,149 +308,72 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
302
308
  ...
303
309
 
304
310
  @typing.overload
305
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
306
- """
307
- Decorator prototype for all step decorators. This function gets specialized
308
- and imported for all decorators types by _import_plugin_decorators().
309
- """
310
- ...
311
-
312
- @typing.overload
313
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
314
- ...
315
-
316
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
317
- """
318
- Decorator prototype for all step decorators. This function gets specialized
319
- and imported for all decorators types by _import_plugin_decorators().
320
- """
321
- ...
322
-
323
- @typing.overload
324
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
311
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
325
312
  """
326
- Specifies the resources needed when executing this step.
327
-
328
- Use `@resources` to specify the resource requirements
329
- independently of the specific compute layer (`@batch`, `@kubernetes`).
313
+ Specifies that the step will success under all circumstances.
330
314
 
331
- You can choose the compute layer on the command line by executing e.g.
332
- ```
333
- python myflow.py run --with batch
334
- ```
335
- or
336
- ```
337
- python myflow.py run --with kubernetes
338
- ```
339
- which executes the flow on the desired system using the
340
- requirements specified in `@resources`.
315
+ The decorator will create an optional artifact, specified by `var`, which
316
+ contains the exception raised. You can use it to detect the presence
317
+ of errors, indicating that all happy-path artifacts produced by the step
318
+ are missing.
341
319
 
342
320
 
343
321
  Parameters
344
322
  ----------
345
- cpu : int, default 1
346
- Number of CPUs required for this step.
347
- gpu : int, optional, default None
348
- Number of GPUs required for this step.
349
- disk : int, optional, default None
350
- Disk size (in MB) required for this step. Only applies on Kubernetes.
351
- memory : int, default 4096
352
- Memory size (in MB) required for this step.
353
- shared_memory : int, optional, default None
354
- The value for the size (in MiB) of the /dev/shm volume for this step.
355
- This parameter maps to the `--shm-size` option in Docker.
323
+ var : str, optional, default None
324
+ Name of the artifact in which to store the caught exception.
325
+ If not specified, the exception is not stored.
326
+ print_exception : bool, default True
327
+ Determines whether or not the exception is printed to
328
+ stdout when caught.
356
329
  """
357
330
  ...
358
331
 
359
332
  @typing.overload
360
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
333
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
361
334
  ...
362
335
 
363
336
  @typing.overload
364
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
337
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
365
338
  ...
366
339
 
367
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
340
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
368
341
  """
369
- Specifies the resources needed when executing this step.
370
-
371
- Use `@resources` to specify the resource requirements
372
- independently of the specific compute layer (`@batch`, `@kubernetes`).
342
+ Specifies that the step will success under all circumstances.
373
343
 
374
- You can choose the compute layer on the command line by executing e.g.
375
- ```
376
- python myflow.py run --with batch
377
- ```
378
- or
379
- ```
380
- python myflow.py run --with kubernetes
381
- ```
382
- which executes the flow on the desired system using the
383
- requirements specified in `@resources`.
344
+ The decorator will create an optional artifact, specified by `var`, which
345
+ contains the exception raised. You can use it to detect the presence
346
+ of errors, indicating that all happy-path artifacts produced by the step
347
+ are missing.
384
348
 
385
349
 
386
350
  Parameters
387
351
  ----------
388
- cpu : int, default 1
389
- Number of CPUs required for this step.
390
- gpu : int, optional, default None
391
- Number of GPUs required for this step.
392
- disk : int, optional, default None
393
- Disk size (in MB) required for this step. Only applies on Kubernetes.
394
- memory : int, default 4096
395
- Memory size (in MB) required for this step.
396
- shared_memory : int, optional, default None
397
- The value for the size (in MiB) of the /dev/shm volume for this step.
398
- This parameter maps to the `--shm-size` option in Docker.
352
+ var : str, optional, default None
353
+ Name of the artifact in which to store the caught exception.
354
+ If not specified, the exception is not stored.
355
+ print_exception : bool, default True
356
+ Determines whether or not the exception is printed to
357
+ stdout when caught.
399
358
  """
400
359
  ...
401
360
 
402
361
  @typing.overload
403
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
362
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
404
363
  """
405
- Creates a human-readable report, a Metaflow Card, after this step completes.
406
-
407
- Note that you may add multiple `@card` decorators in a step with different parameters.
408
-
409
-
410
- Parameters
411
- ----------
412
- type : str, default 'default'
413
- Card type.
414
- id : str, optional, default None
415
- If multiple cards are present, use this id to identify this card.
416
- options : Dict[str, Any], default {}
417
- Options passed to the card. The contents depend on the card type.
418
- timeout : int, default 45
419
- Interrupt reporting if it takes more than this many seconds.
364
+ Decorator prototype for all step decorators. This function gets specialized
365
+ and imported for all decorators types by _import_plugin_decorators().
420
366
  """
421
367
  ...
422
368
 
423
369
  @typing.overload
424
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
425
- ...
426
-
427
- @typing.overload
428
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
370
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
429
371
  ...
430
372
 
431
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
373
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
432
374
  """
433
- Creates a human-readable report, a Metaflow Card, after this step completes.
434
-
435
- Note that you may add multiple `@card` decorators in a step with different parameters.
436
-
437
-
438
- Parameters
439
- ----------
440
- type : str, default 'default'
441
- Card type.
442
- id : str, optional, default None
443
- If multiple cards are present, use this id to identify this card.
444
- options : Dict[str, Any], default {}
445
- Options passed to the card. The contents depend on the card type.
446
- timeout : int, default 45
447
- Interrupt reporting if it takes more than this many seconds.
375
+ Decorator prototype for all step decorators. This function gets specialized
376
+ and imported for all decorators types by _import_plugin_decorators().
448
377
  """
449
378
  ...
450
379
 
@@ -507,108 +436,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
507
436
  """
508
437
  ...
509
438
 
510
- @typing.overload
511
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
512
- """
513
- Specifies that the step will success under all circumstances.
514
-
515
- The decorator will create an optional artifact, specified by `var`, which
516
- contains the exception raised. You can use it to detect the presence
517
- of errors, indicating that all happy-path artifacts produced by the step
518
- are missing.
519
-
520
-
521
- Parameters
522
- ----------
523
- var : str, optional, default None
524
- Name of the artifact in which to store the caught exception.
525
- If not specified, the exception is not stored.
526
- print_exception : bool, default True
527
- Determines whether or not the exception is printed to
528
- stdout when caught.
529
- """
530
- ...
531
-
532
- @typing.overload
533
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
534
- ...
535
-
536
- @typing.overload
537
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
538
- ...
539
-
540
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
541
- """
542
- Specifies that the step will success under all circumstances.
543
-
544
- The decorator will create an optional artifact, specified by `var`, which
545
- contains the exception raised. You can use it to detect the presence
546
- of errors, indicating that all happy-path artifacts produced by the step
547
- are missing.
548
-
549
-
550
- Parameters
551
- ----------
552
- var : str, optional, default None
553
- Name of the artifact in which to store the caught exception.
554
- If not specified, the exception is not stored.
555
- print_exception : bool, default True
556
- Determines whether or not the exception is printed to
557
- stdout when caught.
558
- """
559
- ...
560
-
561
- @typing.overload
562
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
563
- """
564
- Specifies the PyPI packages for the step.
565
-
566
- Information in this decorator will augment any
567
- attributes set in the `@pyi_base` flow-level decorator. Hence,
568
- you can use `@pypi_base` to set packages required by all
569
- steps and use `@pypi` to specify step-specific overrides.
570
-
571
-
572
- Parameters
573
- ----------
574
- packages : Dict[str, str], default: {}
575
- Packages to use for this step. The key is the name of the package
576
- and the value is the version to use.
577
- python : str, optional, default: None
578
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
579
- that the version used will correspond to the version of the Python interpreter used to start the run.
580
- """
581
- ...
582
-
583
- @typing.overload
584
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
585
- ...
586
-
587
- @typing.overload
588
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
589
- ...
590
-
591
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
592
- """
593
- Specifies the PyPI packages for the step.
594
-
595
- Information in this decorator will augment any
596
- attributes set in the `@pyi_base` flow-level decorator. Hence,
597
- you can use `@pypi_base` to set packages required by all
598
- steps and use `@pypi` to specify step-specific overrides.
599
-
600
-
601
- Parameters
602
- ----------
603
- packages : Dict[str, str], default: {}
604
- Packages to use for this step. The key is the name of the package
605
- and the value is the version to use.
606
- python : str, optional, default: None
607
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
608
- that the version used will correspond to the version of the Python interpreter used to start the run.
609
- """
610
- ...
611
-
612
439
  @typing.overload
613
440
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
614
441
  """
@@ -665,61 +492,169 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
665
492
  ...
666
493
 
667
494
  @typing.overload
668
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
495
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
669
496
  """
670
- Specifies the Conda environment for the step.
497
+ Specifies secrets to be retrieved and injected as environment variables prior to
498
+ the execution of a step.
671
499
 
672
- Information in this decorator will augment any
673
- attributes set in the `@conda_base` flow-level decorator. Hence,
674
- you can use `@conda_base` to set packages required by all
675
- steps and use `@conda` to specify step-specific overrides.
500
+
501
+ Parameters
502
+ ----------
503
+ sources : List[Union[str, Dict[str, Any]]], default: []
504
+ List of secret specs, defining how the secrets are to be retrieved
505
+ role : str, optional, default: None
506
+ Role to use for fetching secrets
507
+ """
508
+ ...
509
+
510
+ @typing.overload
511
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
512
+ ...
513
+
514
+ @typing.overload
515
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
516
+ ...
517
+
518
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
519
+ """
520
+ Specifies secrets to be retrieved and injected as environment variables prior to
521
+ the execution of a step.
676
522
 
677
523
 
678
524
  Parameters
679
525
  ----------
680
- packages : Dict[str, str], default {}
681
- Packages to use for this step. The key is the name of the package
682
- and the value is the version to use.
683
- libraries : Dict[str, str], default {}
684
- Supported for backward compatibility. When used with packages, packages will take precedence.
685
- python : str, optional, default None
686
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
687
- that the version used will correspond to the version of the Python interpreter used to start the run.
688
- disabled : bool, default False
689
- If set to True, disables @conda.
526
+ sources : List[Union[str, Dict[str, Any]]], default: []
527
+ List of secret specs, defining how the secrets are to be retrieved
528
+ role : str, optional, default: None
529
+ Role to use for fetching secrets
690
530
  """
691
531
  ...
692
532
 
693
533
  @typing.overload
694
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
534
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
535
+ """
536
+ Creates a human-readable report, a Metaflow Card, after this step completes.
537
+
538
+ Note that you may add multiple `@card` decorators in a step with different parameters.
539
+
540
+
541
+ Parameters
542
+ ----------
543
+ type : str, default 'default'
544
+ Card type.
545
+ id : str, optional, default None
546
+ If multiple cards are present, use this id to identify this card.
547
+ options : Dict[str, Any], default {}
548
+ Options passed to the card. The contents depend on the card type.
549
+ timeout : int, default 45
550
+ Interrupt reporting if it takes more than this many seconds.
551
+ """
695
552
  ...
696
553
 
697
554
  @typing.overload
698
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
555
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
699
556
  ...
700
557
 
701
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
558
+ @typing.overload
559
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
560
+ ...
561
+
562
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
702
563
  """
703
- Specifies the Conda environment for the step.
564
+ Creates a human-readable report, a Metaflow Card, after this step completes.
704
565
 
705
- Information in this decorator will augment any
706
- attributes set in the `@conda_base` flow-level decorator. Hence,
707
- you can use `@conda_base` to set packages required by all
708
- steps and use `@conda` to specify step-specific overrides.
566
+ Note that you may add multiple `@card` decorators in a step with different parameters.
709
567
 
710
568
 
711
569
  Parameters
712
570
  ----------
713
- packages : Dict[str, str], default {}
714
- Packages to use for this step. The key is the name of the package
715
- and the value is the version to use.
716
- libraries : Dict[str, str], default {}
717
- Supported for backward compatibility. When used with packages, packages will take precedence.
718
- python : str, optional, default None
719
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
720
- that the version used will correspond to the version of the Python interpreter used to start the run.
721
- disabled : bool, default False
722
- If set to True, disables @conda.
571
+ type : str, default 'default'
572
+ Card type.
573
+ id : str, optional, default None
574
+ If multiple cards are present, use this id to identify this card.
575
+ options : Dict[str, Any], default {}
576
+ Options passed to the card. The contents depend on the card type.
577
+ timeout : int, default 45
578
+ Interrupt reporting if it takes more than this many seconds.
579
+ """
580
+ ...
581
+
582
+ @typing.overload
583
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
584
+ """
585
+ Specifies the resources needed when executing this step.
586
+
587
+ Use `@resources` to specify the resource requirements
588
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
589
+
590
+ You can choose the compute layer on the command line by executing e.g.
591
+ ```
592
+ python myflow.py run --with batch
593
+ ```
594
+ or
595
+ ```
596
+ python myflow.py run --with kubernetes
597
+ ```
598
+ which executes the flow on the desired system using the
599
+ requirements specified in `@resources`.
600
+
601
+
602
+ Parameters
603
+ ----------
604
+ cpu : int, default 1
605
+ Number of CPUs required for this step.
606
+ gpu : int, optional, default None
607
+ Number of GPUs required for this step.
608
+ disk : int, optional, default None
609
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
610
+ memory : int, default 4096
611
+ Memory size (in MB) required for this step.
612
+ shared_memory : int, optional, default None
613
+ The value for the size (in MiB) of the /dev/shm volume for this step.
614
+ This parameter maps to the `--shm-size` option in Docker.
615
+ """
616
+ ...
617
+
618
+ @typing.overload
619
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
620
+ ...
621
+
622
+ @typing.overload
623
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
624
+ ...
625
+
626
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
627
+ """
628
+ Specifies the resources needed when executing this step.
629
+
630
+ Use `@resources` to specify the resource requirements
631
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
632
+
633
+ You can choose the compute layer on the command line by executing e.g.
634
+ ```
635
+ python myflow.py run --with batch
636
+ ```
637
+ or
638
+ ```
639
+ python myflow.py run --with kubernetes
640
+ ```
641
+ which executes the flow on the desired system using the
642
+ requirements specified in `@resources`.
643
+
644
+
645
+ Parameters
646
+ ----------
647
+ cpu : int, default 1
648
+ Number of CPUs required for this step.
649
+ gpu : int, optional, default None
650
+ Number of GPUs required for this step.
651
+ disk : int, optional, default None
652
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
653
+ memory : int, default 4096
654
+ Memory size (in MB) required for this step.
655
+ shared_memory : int, optional, default None
656
+ The value for the size (in MiB) of the /dev/shm volume for this step.
657
+ This parameter maps to the `--shm-size` option in Docker.
723
658
  """
724
659
  ...
725
660
 
@@ -812,6 +747,116 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
812
747
  """
813
748
  ...
814
749
 
750
+ @typing.overload
751
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
752
+ """
753
+ Specifies the PyPI packages for the step.
754
+
755
+ Information in this decorator will augment any
756
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
757
+ you can use `@pypi_base` to set packages required by all
758
+ steps and use `@pypi` to specify step-specific overrides.
759
+
760
+
761
+ Parameters
762
+ ----------
763
+ packages : Dict[str, str], default: {}
764
+ Packages to use for this step. The key is the name of the package
765
+ and the value is the version to use.
766
+ python : str, optional, default: None
767
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
768
+ that the version used will correspond to the version of the Python interpreter used to start the run.
769
+ """
770
+ ...
771
+
772
+ @typing.overload
773
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
774
+ ...
775
+
776
+ @typing.overload
777
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
778
+ ...
779
+
780
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
781
+ """
782
+ Specifies the PyPI packages for the step.
783
+
784
+ Information in this decorator will augment any
785
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
786
+ you can use `@pypi_base` to set packages required by all
787
+ steps and use `@pypi` to specify step-specific overrides.
788
+
789
+
790
+ Parameters
791
+ ----------
792
+ packages : Dict[str, str], default: {}
793
+ Packages to use for this step. The key is the name of the package
794
+ and the value is the version to use.
795
+ python : str, optional, default: None
796
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
797
+ that the version used will correspond to the version of the Python interpreter used to start the run.
798
+ """
799
+ ...
800
+
801
+ @typing.overload
802
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
803
+ """
804
+ Specifies the Conda environment for the step.
805
+
806
+ Information in this decorator will augment any
807
+ attributes set in the `@conda_base` flow-level decorator. Hence,
808
+ you can use `@conda_base` to set packages required by all
809
+ steps and use `@conda` to specify step-specific overrides.
810
+
811
+
812
+ Parameters
813
+ ----------
814
+ packages : Dict[str, str], default {}
815
+ Packages to use for this step. The key is the name of the package
816
+ and the value is the version to use.
817
+ libraries : Dict[str, str], default {}
818
+ Supported for backward compatibility. When used with packages, packages will take precedence.
819
+ python : str, optional, default None
820
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
821
+ that the version used will correspond to the version of the Python interpreter used to start the run.
822
+ disabled : bool, default False
823
+ If set to True, disables @conda.
824
+ """
825
+ ...
826
+
827
+ @typing.overload
828
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
829
+ ...
830
+
831
+ @typing.overload
832
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
833
+ ...
834
+
835
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
836
+ """
837
+ Specifies the Conda environment for the step.
838
+
839
+ Information in this decorator will augment any
840
+ attributes set in the `@conda_base` flow-level decorator. Hence,
841
+ you can use `@conda_base` to set packages required by all
842
+ steps and use `@conda` to specify step-specific overrides.
843
+
844
+
845
+ Parameters
846
+ ----------
847
+ packages : Dict[str, str], default {}
848
+ Packages to use for this step. The key is the name of the package
849
+ and the value is the version to use.
850
+ libraries : Dict[str, str], default {}
851
+ Supported for backward compatibility. When used with packages, packages will take precedence.
852
+ python : str, optional, default None
853
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
854
+ that the version used will correspond to the version of the Python interpreter used to start the run.
855
+ disabled : bool, default False
856
+ If set to True, disables @conda.
857
+ """
858
+ ...
859
+
815
860
  @typing.overload
816
861
  def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
817
862
  """
@@ -845,42 +890,139 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
845
890
  """
846
891
  ...
847
892
 
848
- @typing.overload
849
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
893
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
850
894
  """
851
- Specifies secrets to be retrieved and injected as environment variables prior to
852
- the execution of a step.
895
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
896
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
853
897
 
854
898
 
855
899
  Parameters
856
900
  ----------
857
- sources : List[Union[str, Dict[str, Any]]], default: []
858
- List of secret specs, defining how the secrets are to be retrieved
859
- role : str, optional, default: None
860
- Role to use for fetching secrets
901
+ timeout : int
902
+ Time, in seconds before the task times out and fails. (Default: 3600)
903
+ poke_interval : int
904
+ Time in seconds that the job should wait in between each try. (Default: 60)
905
+ mode : str
906
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
907
+ exponential_backoff : bool
908
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
909
+ pool : str
910
+ the slot pool this task should run in,
911
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
912
+ soft_fail : bool
913
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
914
+ name : str
915
+ Name of the sensor on Airflow
916
+ description : str
917
+ Description of sensor in the Airflow UI
918
+ external_dag_id : str
919
+ The dag_id that contains the task you want to wait for.
920
+ external_task_ids : List[str]
921
+ The list of task_ids that you want to wait for.
922
+ If None (default value) the sensor waits for the DAG. (Default: None)
923
+ allowed_states : List[str]
924
+ Iterable of allowed states, (Default: ['success'])
925
+ failed_states : List[str]
926
+ Iterable of failed or dis-allowed states. (Default: None)
927
+ execution_delta : datetime.timedelta
928
+ time difference with the previous execution to look at,
929
+ the default is the same logical date as the current task or DAG. (Default: None)
930
+ check_existence: bool
931
+ Set to True to check if the external task exists or check if
932
+ the DAG to wait for exists. (Default: True)
861
933
  """
862
934
  ...
863
935
 
864
936
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
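To make the `@trigger` overloads added above concrete, here is a minimal sketch of an event-triggered flow with a payload-to-parameter mapping. The event name `data_ready` and the field names are illustrative assumptions; the trigger only fires once the flow is deployed to an event-capable target such as Argo Workflows.

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical event name and payload-field mapping, for illustration only.
@trigger(event={"name": "data_ready", "parameters": {"table": "table_name"}})
class TriggeredFlow(FlowSpec):

    # Populated from the event payload's 'table_name' field when triggered;
    # falls back to the default for manual runs.
    table = Parameter("table", default="events")

    @step
    def start(self):
        print("Processing table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```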
 
@@ -935,6 +1077,57 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
 
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
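As a usage sketch for the `@conda_base` stub added above: the version pins below are arbitrary examples rather than recommendations, and the flow assumes it is launched with a Conda-backed environment.

```python
from metaflow import FlowSpec, conda_base, step

# Arbitrary example pins; any resolvable versions would do.
@conda_base(packages={"pandas": "2.2.2", "numpy": "1.26.4"}, python="3.11.9")
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        # Imported inside the step so it resolves from the flow-level
        # Conda environment rather than the launching interpreter.
        import pandas as pd
        print("pandas version:", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseFlow()
```

Running the flow with the Conda environment enabled (e.g. `python conda_base_flow.py --environment=conda run`, filename hypothetical) lets Metaflow resolve the pinned packages before executing the steps.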
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1155,190 +1348,3 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
 
- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-