metaflow-stubs 2.12.39__py2.py3-none-any.whl → 2.13__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (144)
  1. metaflow-stubs/__init__.pyi +251 -250
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +3 -3
  11. metaflow-stubs/flowspec.pyi +7 -7
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +22 -22
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +5 -5
  23. metaflow-stubs/plugins/__init__.pyi +14 -14
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +31 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +16 -14
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +4 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  108. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  110. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  113. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  116. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  117. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  120. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +28 -28
  124. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  125. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  129. metaflow-stubs/runner/utils.pyi +3 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +2 -2
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. metaflow-stubs/user_configs/__init__.pyi +2 -2
  136. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  137. metaflow-stubs/user_configs/config_options.pyi +9 -4
  138. metaflow-stubs/user_configs/config_parameters.pyi +9 -9
  139. metaflow-stubs/version.pyi +2 -2
  140. {metaflow_stubs-2.12.39.dist-info → metaflow_stubs-2.13.dist-info}/METADATA +2 -2
  141. metaflow_stubs-2.13.dist-info/RECORD +144 -0
  142. metaflow_stubs-2.12.39.dist-info/RECORD +0 -144
  143. {metaflow_stubs-2.12.39.dist-info → metaflow_stubs-2.13.dist-info}/WHEEL +0 -0
  144. {metaflow_stubs-2.12.39.dist-info → metaflow_stubs-2.13.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.39 #
- # Generated on 2024-12-10T16:02:32.687901 #
+ # MF version: 2.13 #
+ # Generated on 2024-12-20T07:38:30.145823 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -31,11 +31,12 @@ from .parameters import Parameter as Parameter
  from .parameters import JSONTypeClass as JSONTypeClass
  from .parameters import JSONType as JSONType
  from .user_configs.config_parameters import Config as Config
+ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -142,105 +143,41 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
+ This decorator is useful if this step may hang indefinitely.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the resources needed when executing this step.
+ Specifies a timeout for your step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful if this step may hang indefinitely.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

@@ -266,27 +203,33 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the PyPI packages for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the PyPI packages for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

@@ -311,12 +254,6 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
- """
- ...
-
  @typing.overload
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -376,103 +313,240 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the resources needed when executing this step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the resources needed when executing this step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This decorator is useful if this step may hang indefinitely.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies a timeout for your step.
+ Specifies that the step will success under all circumstances.

- This decorator is useful if this step may hang indefinitely.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+ """
+ ...
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...

@@ -557,52 +631,22 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- """
- ...
-
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

@@ -630,94 +674,51 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the PyPI packages for all steps of the flow.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the PyPI packages for all steps of the flow.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...

- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
  """
  ...
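
Beyond the reshuffled stub order, the functional changes surfaced in this diff are the new top-level `ConfigValue` re-export and the regenerated decorator stubs. The snippet below is a minimal, hypothetical flow written against the 2.13 stubs to show how the `@timeout`, `@retry`, and `@catch` signatures above fit together; the flow name, durations, and the "failure" artifact name are illustrative and not part of the package.

```python
# Hypothetical usage sketch against the metaflow 2.13 stubs shown above.
# Flow name, durations, and the "failure" artifact name are illustrative.
from metaflow import FlowSpec, step, timeout, retry, catch


class TimeoutDemoFlow(FlowSpec):

    @catch(var="failure")            # a timeout surfaces as a step exception, so @catch can record it
    @retry(times=2)                  # ...and @retry can re-run the step before @catch gives up
    @timeout(minutes=1, seconds=30)  # per the docstring, durations are summed: 90 seconds total
    @step
    def start(self):
        import time
        time.sleep(2)                # stand-in work; would be interrupted past the 90 s limit
        self.next(self.end)

    @step
    def end(self):
        # "failure" holds the caught exception if the step ultimately failed
        print("caught:", getattr(self, "failure", None))


if __name__ == "__main__":
    TimeoutDemoFlow()
```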