metaflow-stubs 2.12.31__py2.py3-none-any.whl → 2.12.32__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. metaflow-stubs/__init__.pyi +230 -230
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +2 -2
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  15. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  16. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  17. metaflow-stubs/metadata_provider/util.pyi +2 -2
  18. metaflow-stubs/metaflow_config.pyi +2 -2
  19. metaflow-stubs/metaflow_current.pyi +27 -27
  20. metaflow-stubs/mflog/__init__.pyi +2 -2
  21. metaflow-stubs/mflog/mflog.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +4 -4
  24. metaflow-stubs/plugins/__init__.pyi +13 -13
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  35. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  37. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  38. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  39. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  41. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  42. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  43. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  44. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  48. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  49. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  51. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  52. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  53. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  64. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  68. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  69. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  70. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  71. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_cli.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  86. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  87. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  88. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  89. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  90. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  91. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  92. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  93. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  95. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  97. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  98. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  99. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  100. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  101. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  102. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  107. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  109. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  110. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  114. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  118. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  119. metaflow-stubs/plugins/logs_cli.pyi +2 -2
  120. metaflow-stubs/plugins/package_cli.pyi +2 -2
  121. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  126. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  128. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  129. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  132. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  133. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  134. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  135. metaflow-stubs/plugins/tag_cli.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/procpoll.pyi +2 -2
  139. metaflow-stubs/pylint_wrapper.pyi +2 -2
  140. metaflow-stubs/runner/__init__.pyi +2 -2
  141. metaflow-stubs/runner/deployer.pyi +30 -30
  142. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  143. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  144. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  145. metaflow-stubs/runner/nbrun.pyi +2 -2
  146. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  147. metaflow-stubs/runner/utils.pyi +3 -3
  148. metaflow-stubs/system/__init__.pyi +2 -2
  149. metaflow-stubs/system/system_logger.pyi +3 -3
  150. metaflow-stubs/system/system_monitor.pyi +2 -2
  151. metaflow-stubs/tagging_util.pyi +2 -2
  152. metaflow-stubs/tuple_util.pyi +2 -2
  153. metaflow-stubs/version.pyi +2 -2
  154. {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.32.dist-info}/METADATA +2 -2
  155. metaflow_stubs-2.12.32.dist-info/RECORD +158 -0
  156. metaflow_stubs-2.12.31.dist-info/RECORD +0 -158
  157. {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.32.dist-info}/WHEEL +0 -0
  158. {metaflow_stubs-2.12.31.dist-info → metaflow_stubs-2.12.32.dist-info}/top_level.txt +0 -0
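The hunks below apparently all come from metaflow-stubs/__init__.pyi, the largest entry above, and most of the churn is the stub generator re-emitting identical decorator declarations in a different order. Since `metaflow-stubs` ships only `.pyi` type stubs, the signatures shown are what a type checker resolves when you write an ordinary flow against the real `metaflow` package. A minimal sketch using the `@retry` and `@resources` signatures that appear in this diff (the flow itself is illustrative and is not part of either package):

```python
# Illustrative flow, not part of either package; requires `metaflow` at runtime.
# `metaflow-stubs` only supplies the .pyi signatures that type checkers use here.
from metaflow import FlowSpec, step, retry, resources

class HelloFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)  # defaults per the stub signature
    @resources(cpu=1, memory=4096)  # matches resources(*, cpu, gpu, disk, memory, shared_memory)
    @step
    def start(self):
        self.message = "hello"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    HelloFlow()
```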
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.31 #
- # Generated on 2024-11-22T20:12:01.558412 #
+ # MF version: 2.12.32 #
+ # Generated on 2024-11-26T19:51:56.545525 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -29,8 +29,8 @@ from .flowspec import FlowSpec as FlowSpec
  from .parameters import Parameter as Parameter
  from .parameters import JSONTypeClass as JSONTypeClass
  from .parameters import JSONType as JSONType
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -138,54 +138,21 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

@@ -196,33 +163,23 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

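This hunk is the `@catch` and `@environment` declarations trading places; neither signature changed. For reference, a hedged sketch of `@environment` as declared above (flow and variable names are illustrative):

```python
# Illustrative only: @environment sets OS environment variables before the step runs.
import os
from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):

    @environment(vars={"MY_SETTING": "on"})  # matches environment(*, vars: Dict[str, str])
    @step
    def start(self):
        print(os.environ["MY_SETTING"])  # visible inside the step's process
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```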
@@ -266,178 +223,221 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

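The `@retry` docstring above points out that it combines with `@catch`: retries absorb transient failures, and `@catch` lets the flow continue past a step that still fails. A sketch of the combination (the always-failing step is contrived; the artifact name `err` is illustrative):

```python
# Illustrative only: @retry re-runs a flaky step; @catch stores the final
# exception in the artifact named by `var` so the flow can proceed.
from metaflow import FlowSpec, catch, retry, step

class RobustFlow(FlowSpec):

    @catch(var="err", print_exception=True)
    @retry(times=2)
    @step
    def start(self):
        raise RuntimeError("flaky dependency")  # fails every attempt, on purpose
        self.next(self.end)  # unreachable, but still declares the graph edge

    @step
    def end(self):
        print("caught:", self.err)  # the exception captured by @catch

if __name__ == "__main__":
    RobustFlow()
```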
@@ -472,6 +472,13 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
+ ...
+
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -495,13 +502,85 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...

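This hunk moves `@project` and `@trigger` up in the file with their text unchanged. A sketch putting the two together, following the `{'name': ..., 'parameters': ...}` form from the docstring (the event name and field mapping are illustrative; the trigger only takes effect once the flow is deployed to an event-capable scheduler such as Argo Workflows):

```python
# Illustrative only: 'data_ready' and the 's3_url' -> 'path' mapping are made up.
from metaflow import FlowSpec, Parameter, project, step, trigger

@project(name="demo")
@trigger(event={"name": "data_ready", "parameters": {"path": "s3_url"}})
class TriggeredFlow(FlowSpec):
    path = Parameter("path", default="")  # filled from the event payload on trigger

    @step
    def start(self):
        print("triggered with path =", self.path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TriggeredFlow()
```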
@@ -609,22 +688,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
- """
- ...
-
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -644,76 +707,13 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...
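The S3 sensor that closes the diff takes only keyword arguments and declares no defaults in the stub. A sketch with every argument filled in (all values are illustrative placeholders; the decorator only matters when the flow is compiled for Airflow with `airflow create`):

```python
# Illustrative only: bucket, key, pool, and connection id are placeholders.
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,               # give up after an hour
    poke_interval=60,           # check the key once a minute
    mode="reschedule",          # free the worker slot between pokes
    exponential_backoff=False,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_input",
    description="Block start until the input object lands",
    bucket_key="inputs/data.csv",
    bucket_name="example-bucket",
    wildcard_match=False,
    aws_conn_id="aws_default",
    verify=True,
)
class SensedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensedFlow()
```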