metaflow-stubs 2.13.3__py2.py3-none-any.whl → 2.13.5__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +232 -232
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +4 -4
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +3 -3
  23. metaflow-stubs/plugins/__init__.pyi +13 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  39. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +4 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +3 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +20 -0
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  111. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  114. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  120. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +4 -4
  125. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  126. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +2 -2
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +2 -2
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. metaflow-stubs/user_configs/__init__.pyi +2 -2
  137. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  138. metaflow-stubs/user_configs/config_options.pyi +3 -3
  139. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  140. metaflow-stubs/version.pyi +2 -2
  141. {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.5.dist-info}/METADATA +2 -2
  142. metaflow_stubs-2.13.5.dist-info/RECORD +145 -0
  143. metaflow_stubs-2.13.3.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.5.dist-info}/WHEEL +0 -0
  145. {metaflow_stubs-2.13.3.dist-info → metaflow_stubs-2.13.5.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.3 #
- # Generated on 2025-01-10T15:23:16.057741 #
+ # MF version: 2.13.5 #
+ # Generated on 2025-01-16T23:35:54.592023 #
  ######################################################################################################

  from __future__ import annotations
@@ -143,126 +143,33 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

@@ -311,72 +218,44 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

@@ -412,41 +291,82 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

@@ -457,72 +377,103 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies a timeout for your step.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies a timeout for your step.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

@@ -607,45 +558,10 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
  """
  ...

@@ -722,3 +638,87 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
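The hunks above largely reorder the stubs for Metaflow's step- and flow-level decorators (`@conda`, `@batch`, `@resources`, `@pypi`, `@secrets`, `@environment`, `@timeout`, `@retry`, `@schedule`, `@pypi_base`, `@conda_base`, `@project`) without changing their signatures. As orientation for what these stubs type, here is a minimal sketch of a flow using a few of them; the flow itself is illustrative, not part of this diff, and assumes `metaflow` is installed alongside the stubs:

```python
# Illustrative only: a tiny flow exercising decorators whose stubs appear above.
# Assumes `pip install metaflow metaflow-stubs`; all names below are hypothetical.
from metaflow import FlowSpec, conda_base, project, resources, retry, step


@project(name="stub_demo")          # flow-level: shared namespace for related flows
@conda_base(python="3.11")          # flow-level: common Conda environment for all steps
class StubDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8192)              # compute-layer-agnostic resource request
    @retry(times=3, minutes_between_retries=2)  # retry transient failures
    @step
    def start(self):
        self.values = [1, 2, 3]
        self.next(self.end)

    @step
    def end(self):
        print("sum =", sum(self.values))


if __name__ == "__main__":
    StubDemoFlow()
```

With the stub package installed, a type checker such as mypy or pyright resolves these decorators to the overloads shown in the hunks above.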
metaflow-stubs/cards.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.3 #
- # Generated on 2025-01-10T15:23:15.978105 #
+ # MF version: 2.13.5 #
+ # Generated on 2025-01-16T23:35:54.513033 #
  ######################################################################################################

  from __future__ import annotations
metaflow-stubs/cli.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.3 #
- # Generated on 2025-01-10T15:23:15.986979 #
+ # MF version: 2.13.5 #
+ # Generated on 2025-01-16T23:35:54.521819 #
  ######################################################################################################

  from __future__ import annotations
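The cards.pyi and cli.pyi hunks above show the typical change in this release: only the generated header's MF version and timestamp are bumped from 2.13.3 to 2.13.5. A small sketch, assuming both `metaflow` and `metaflow-stubs` are installed in the same environment and assuming the stub version is meant to track the metaflow release it was generated for (an inference from the "MF version" header, not from package documentation), to confirm the two stay in lockstep:

```python
# Sketch: compare the installed distribution versions of metaflow and metaflow-stubs.
from importlib.metadata import PackageNotFoundError, version

try:
    mf = version("metaflow")
    stubs = version("metaflow-stubs")
except PackageNotFoundError as exc:  # one of the two packages is not installed
    raise SystemExit(f"missing package: {exc}")

print(f"metaflow={mf}  metaflow-stubs={stubs}")
if mf != stubs:
    print("warning: stub package does not match the metaflow release it was generated for")
```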