metaflow-stubs 2.16.1__py2.py3-none-any.whl → 2.16.2__py2.py3-none-any.whl

This diff reflects the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +569 -569
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +17 -17
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  29. metaflow-stubs/parameters.pyi +2 -2
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +5 -5
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +3 -3
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +2 -2
  154. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.16.1.dist-info → metaflow_stubs-2.16.2.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.16.2.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.16.1.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.16.1.dist-info → metaflow_stubs-2.16.2.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.16.1.dist-info → metaflow_stubs-2.16.2.dist-info}/top_level.txt +0 -0
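
The __init__.pyi diff below is a routine stub regeneration: the version banner and generation timestamp change, and the re-exported symbols and decorator stubs appear in a different order. As an illustrative aside (not part of the package diff), the sketch below shows one assumed way the re-exported requirements_txt_parser is wired into a flow through Config; the flow name, the "deps" config name, and the requirements.txt path are hypothetical, and the exact Config/parser behavior should be checked against the metaflow documentation.

# Hypothetical usage sketch: parse a requirements.txt into a config mapping via the
# requirements_txt_parser re-exported in __init__.pyi. Names here are illustrative.
from metaflow import Config, FlowSpec, step, requirements_txt_parser


class ParserSketchFlow(FlowSpec):
    # Assumed pattern: Config reads the file at "requirements.txt" and the parser
    # converts it into a dict-like config (e.g. a "packages" mapping).
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        print("parsed dependency config:", self.deps)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ParserSketchFlow()
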
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.16.1 #
- # Generated on 2025-07-15T19:29:43.053659 #
+ # MF version: 2.16.2 #
+ # Generated on 2025-07-15T23:31:58.903931 #
  ######################################################################################################

  from __future__ import annotations
@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -152,124 +152,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
152
152
  """
153
153
  ...
154
154
 
155
- @typing.overload
156
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
157
- """
158
- Specifies the Conda environment for the step.
159
-
160
- Information in this decorator will augment any
161
- attributes set in the `@conda_base` flow-level decorator. Hence,
162
- you can use `@conda_base` to set packages required by all
163
- steps and use `@conda` to specify step-specific overrides.
164
-
165
-
166
- Parameters
167
- ----------
168
- packages : Dict[str, str], default {}
169
- Packages to use for this step. The key is the name of the package
170
- and the value is the version to use.
171
- libraries : Dict[str, str], default {}
172
- Supported for backward compatibility. When used with packages, packages will take precedence.
173
- python : str, optional, default None
174
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
175
- that the version used will correspond to the version of the Python interpreter used to start the run.
176
- disabled : bool, default False
177
- If set to True, disables @conda.
178
- """
179
- ...
180
-
181
- @typing.overload
182
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
183
- ...
184
-
185
- @typing.overload
186
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
187
- ...
188
-
189
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
190
- """
191
- Specifies the Conda environment for the step.
192
-
193
- Information in this decorator will augment any
194
- attributes set in the `@conda_base` flow-level decorator. Hence,
195
- you can use `@conda_base` to set packages required by all
196
- steps and use `@conda` to specify step-specific overrides.
197
-
198
-
199
- Parameters
200
- ----------
201
- packages : Dict[str, str], default {}
202
- Packages to use for this step. The key is the name of the package
203
- and the value is the version to use.
204
- libraries : Dict[str, str], default {}
205
- Supported for backward compatibility. When used with packages, packages will take precedence.
206
- python : str, optional, default None
207
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
208
- that the version used will correspond to the version of the Python interpreter used to start the run.
209
- disabled : bool, default False
210
- If set to True, disables @conda.
211
- """
212
- ...
213
-
214
- @typing.overload
215
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
216
- """
217
- Specifies a timeout for your step.
218
-
219
- This decorator is useful if this step may hang indefinitely.
220
-
221
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
222
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
223
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
224
-
225
- Note that all the values specified in parameters are added together so if you specify
226
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
227
-
228
-
229
- Parameters
230
- ----------
231
- seconds : int, default 0
232
- Number of seconds to wait prior to timing out.
233
- minutes : int, default 0
234
- Number of minutes to wait prior to timing out.
235
- hours : int, default 0
236
- Number of hours to wait prior to timing out.
237
- """
238
- ...
239
-
240
- @typing.overload
241
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
242
- ...
243
-
244
- @typing.overload
245
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
246
- ...
247
-
248
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
249
- """
250
- Specifies a timeout for your step.
251
-
252
- This decorator is useful if this step may hang indefinitely.
253
-
254
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
255
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
256
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
257
-
258
- Note that all the values specified in parameters are added together so if you specify
259
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
260
-
261
-
262
- Parameters
263
- ----------
264
- seconds : int, default 0
265
- Number of seconds to wait prior to timing out.
266
- minutes : int, default 0
267
- Number of minutes to wait prior to timing out.
268
- hours : int, default 0
269
- Number of hours to wait prior to timing out.
270
- """
271
- ...
272
-
273
155
  @typing.overload
274
156
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
275
157
  """
@@ -350,54 +232,70 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
350
232
  ...
351
233
 
352
234
  @typing.overload
353
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
235
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
354
236
  """
355
- Specifies environment variables to be set prior to the execution of a step.
356
-
357
-
358
- Parameters
359
- ----------
360
- vars : Dict[str, str], default {}
361
- Dictionary of environment variables to set.
237
+ Decorator prototype for all step decorators. This function gets specialized
238
+ and imported for all decorators types by _import_plugin_decorators().
362
239
  """
363
240
  ...
364
241
 
365
242
  @typing.overload
366
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
243
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
367
244
  ...
368
245
 
369
- @typing.overload
370
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
246
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
247
+ """
248
+ Decorator prototype for all step decorators. This function gets specialized
249
+ and imported for all decorators types by _import_plugin_decorators().
250
+ """
371
251
  ...
372
252
 
373
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
253
+ @typing.overload
254
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
374
255
  """
375
- Specifies environment variables to be set prior to the execution of a step.
256
+ Creates a human-readable report, a Metaflow Card, after this step completes.
257
+
258
+ Note that you may add multiple `@card` decorators in a step with different parameters.
376
259
 
377
260
 
378
261
  Parameters
379
262
  ----------
380
- vars : Dict[str, str], default {}
381
- Dictionary of environment variables to set.
263
+ type : str, default 'default'
264
+ Card type.
265
+ id : str, optional, default None
266
+ If multiple cards are present, use this id to identify this card.
267
+ options : Dict[str, Any], default {}
268
+ Options passed to the card. The contents depend on the card type.
269
+ timeout : int, default 45
270
+ Interrupt reporting if it takes more than this many seconds.
382
271
  """
383
272
  ...
384
273
 
385
274
  @typing.overload
386
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
387
- """
388
- Decorator prototype for all step decorators. This function gets specialized
389
- and imported for all decorators types by _import_plugin_decorators().
390
- """
275
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
391
276
  ...
392
277
 
393
278
  @typing.overload
394
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
279
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
395
280
  ...
396
281
 
397
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
282
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
398
283
  """
399
- Decorator prototype for all step decorators. This function gets specialized
400
- and imported for all decorators types by _import_plugin_decorators().
284
+ Creates a human-readable report, a Metaflow Card, after this step completes.
285
+
286
+ Note that you may add multiple `@card` decorators in a step with different parameters.
287
+
288
+
289
+ Parameters
290
+ ----------
291
+ type : str, default 'default'
292
+ Card type.
293
+ id : str, optional, default None
294
+ If multiple cards are present, use this id to identify this card.
295
+ options : Dict[str, Any], default {}
296
+ Options passed to the card. The contents depend on the card type.
297
+ timeout : int, default 45
298
+ Interrupt reporting if it takes more than this many seconds.
401
299
  """
402
300
  ...
403
301
 
@@ -453,253 +351,9 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
453
351
  ...
454
352
 
455
353
  @typing.overload
456
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
457
- """
458
- Specifies that the step will success under all circumstances.
459
-
460
- The decorator will create an optional artifact, specified by `var`, which
461
- contains the exception raised. You can use it to detect the presence
462
- of errors, indicating that all happy-path artifacts produced by the step
463
- are missing.
464
-
465
-
466
- Parameters
467
- ----------
468
- var : str, optional, default None
469
- Name of the artifact in which to store the caught exception.
470
- If not specified, the exception is not stored.
471
- print_exception : bool, default True
472
- Determines whether or not the exception is printed to
473
- stdout when caught.
474
- """
475
- ...
476
-
477
- @typing.overload
478
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
479
- ...
480
-
481
- @typing.overload
482
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
483
- ...
484
-
485
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
486
- """
487
- Specifies that the step will success under all circumstances.
488
-
489
- The decorator will create an optional artifact, specified by `var`, which
490
- contains the exception raised. You can use it to detect the presence
491
- of errors, indicating that all happy-path artifacts produced by the step
492
- are missing.
493
-
494
-
495
- Parameters
496
- ----------
497
- var : str, optional, default None
498
- Name of the artifact in which to store the caught exception.
499
- If not specified, the exception is not stored.
500
- print_exception : bool, default True
501
- Determines whether or not the exception is printed to
502
- stdout when caught.
503
- """
504
- ...
505
-
506
- @typing.overload
507
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
354
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
508
355
  """
509
- Creates a human-readable report, a Metaflow Card, after this step completes.
510
-
511
- Note that you may add multiple `@card` decorators in a step with different parameters.
512
-
513
-
514
- Parameters
515
- ----------
516
- type : str, default 'default'
517
- Card type.
518
- id : str, optional, default None
519
- If multiple cards are present, use this id to identify this card.
520
- options : Dict[str, Any], default {}
521
- Options passed to the card. The contents depend on the card type.
522
- timeout : int, default 45
523
- Interrupt reporting if it takes more than this many seconds.
524
- """
525
- ...
526
-
527
- @typing.overload
528
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
529
- ...
530
-
531
- @typing.overload
532
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
533
- ...
534
-
535
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
536
- """
537
- Creates a human-readable report, a Metaflow Card, after this step completes.
538
-
539
- Note that you may add multiple `@card` decorators in a step with different parameters.
540
-
541
-
542
- Parameters
543
- ----------
544
- type : str, default 'default'
545
- Card type.
546
- id : str, optional, default None
547
- If multiple cards are present, use this id to identify this card.
548
- options : Dict[str, Any], default {}
549
- Options passed to the card. The contents depend on the card type.
550
- timeout : int, default 45
551
- Interrupt reporting if it takes more than this many seconds.
552
- """
553
- ...
554
-
555
- @typing.overload
556
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
557
- """
558
- Specifies the number of times the task corresponding
559
- to a step needs to be retried.
560
-
561
- This decorator is useful for handling transient errors, such as networking issues.
562
- If your task contains operations that can't be retried safely, e.g. database updates,
563
- it is advisable to annotate it with `@retry(times=0)`.
564
-
565
- This can be used in conjunction with the `@catch` decorator. The `@catch`
566
- decorator will execute a no-op task after all retries have been exhausted,
567
- ensuring that the flow execution can continue.
568
-
569
-
570
- Parameters
571
- ----------
572
- times : int, default 3
573
- Number of times to retry this task.
574
- minutes_between_retries : int, default 2
575
- Number of minutes between retries.
576
- """
577
- ...
578
-
579
- @typing.overload
580
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
581
- ...
582
-
583
- @typing.overload
584
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
585
- ...
586
-
587
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
588
- """
589
- Specifies the number of times the task corresponding
590
- to a step needs to be retried.
591
-
592
- This decorator is useful for handling transient errors, such as networking issues.
593
- If your task contains operations that can't be retried safely, e.g. database updates,
594
- it is advisable to annotate it with `@retry(times=0)`.
595
-
596
- This can be used in conjunction with the `@catch` decorator. The `@catch`
597
- decorator will execute a no-op task after all retries have been exhausted,
598
- ensuring that the flow execution can continue.
599
-
600
-
601
- Parameters
602
- ----------
603
- times : int, default 3
604
- Number of times to retry this task.
605
- minutes_between_retries : int, default 2
606
- Number of minutes between retries.
607
- """
608
- ...
609
-
610
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
611
- """
612
- Specifies that this step should execute on Kubernetes.
613
-
614
-
615
- Parameters
616
- ----------
617
- cpu : int, default 1
618
- Number of CPUs required for this step. If `@resources` is
619
- also present, the maximum value from all decorators is used.
620
- memory : int, default 4096
621
- Memory size (in MB) required for this step. If
622
- `@resources` is also present, the maximum value from all decorators is
623
- used.
624
- disk : int, default 10240
625
- Disk size (in MB) required for this step. If
626
- `@resources` is also present, the maximum value from all decorators is
627
- used.
628
- image : str, optional, default None
629
- Docker image to use when launching on Kubernetes. If not specified, and
630
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
631
- not, a default Docker image mapping to the current version of Python is used.
632
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
633
- If given, the imagePullPolicy to be applied to the Docker image of the step.
634
- image_pull_secrets: List[str], default []
635
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
636
- Kubernetes image pull secrets to use when pulling container images
637
- in Kubernetes.
638
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
639
- Kubernetes service account to use when launching pod in Kubernetes.
640
- secrets : List[str], optional, default None
641
- Kubernetes secrets to use when launching pod in Kubernetes. These
642
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
643
- in Metaflow configuration.
644
- node_selector: Union[Dict[str,str], str], optional, default None
645
- Kubernetes node selector(s) to apply to the pod running the task.
646
- Can be passed in as a comma separated string of values e.g.
647
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
648
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
649
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
650
- Kubernetes namespace to use when launching pod in Kubernetes.
651
- gpu : int, optional, default None
652
- Number of GPUs required for this step. A value of zero implies that
653
- the scheduled node should not have GPUs.
654
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
655
- The vendor of the GPUs to be used for this step.
656
- tolerations : List[str], default []
657
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
658
- Kubernetes tolerations to use when launching pod in Kubernetes.
659
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
660
- Kubernetes labels to use when launching pod in Kubernetes.
661
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
662
- Kubernetes annotations to use when launching pod in Kubernetes.
663
- use_tmpfs : bool, default False
664
- This enables an explicit tmpfs mount for this step.
665
- tmpfs_tempdir : bool, default True
666
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
667
- tmpfs_size : int, optional, default: None
668
- The value for the size (in MiB) of the tmpfs mount for this step.
669
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
670
- memory allocated for this step.
671
- tmpfs_path : str, optional, default /metaflow_temp
672
- Path to tmpfs mount for this step.
673
- persistent_volume_claims : Dict[str, str], optional, default None
674
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
675
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
676
- shared_memory: int, optional
677
- Shared memory size (in MiB) required for this step
678
- port: int, optional
679
- Port number to specify in the Kubernetes job object
680
- compute_pool : str, optional, default None
681
- Compute pool to be used for for this step.
682
- If not specified, any accessible compute pool within the perimeter is used.
683
- hostname_resolution_timeout: int, default 10 * 60
684
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
685
- Only applicable when @parallel is used.
686
- qos: str, default: Burstable
687
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
688
-
689
- security_context: Dict[str, Any], optional, default None
690
- Container security context. Applies to the task container. Allows the following keys:
691
- - privileged: bool, optional, default None
692
- - allow_privilege_escalation: bool, optional, default None
693
- - run_as_user: int, optional, default None
694
- - run_as_group: int, optional, default None
695
- - run_as_non_root: bool, optional, default None
696
- """
697
- ...
698
-
699
- @typing.overload
700
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
701
- """
702
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
356
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
703
357
 
704
358
 
705
359
  Parameters
@@ -846,82 +500,387 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
846
500
  ...
847
501
 
848
502
  @typing.overload
849
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
503
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
850
504
  """
851
- Specifies secrets to be retrieved and injected as environment variables prior to
852
- the execution of a step.
505
+ Specifies the number of times the task corresponding
506
+ to a step needs to be retried.
507
+
508
+ This decorator is useful for handling transient errors, such as networking issues.
509
+ If your task contains operations that can't be retried safely, e.g. database updates,
510
+ it is advisable to annotate it with `@retry(times=0)`.
511
+
512
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
513
+ decorator will execute a no-op task after all retries have been exhausted,
514
+ ensuring that the flow execution can continue.
853
515
 
854
516
 
855
517
  Parameters
856
518
  ----------
857
- sources : List[Union[str, Dict[str, Any]]], default: []
858
- List of secret specs, defining how the secrets are to be retrieved
859
- role : str, optional, default: None
860
- Role to use for fetching secrets
519
+ times : int, default 3
520
+ Number of times to retry this task.
521
+ minutes_between_retries : int, default 2
522
+ Number of minutes between retries.
861
523
  """
862
524
  ...
863
525
 
864
526
  @typing.overload
865
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
527
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
866
528
  ...
867
529
 
868
530
  @typing.overload
869
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
531
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
870
532
  ...
871
533
 
872
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
534
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
873
535
  """
874
- Specifies secrets to be retrieved and injected as environment variables prior to
875
- the execution of a step.
536
+ Specifies the number of times the task corresponding
537
+ to a step needs to be retried.
538
+
539
+ This decorator is useful for handling transient errors, such as networking issues.
540
+ If your task contains operations that can't be retried safely, e.g. database updates,
541
+ it is advisable to annotate it with `@retry(times=0)`.
542
+
543
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
544
+ decorator will execute a no-op task after all retries have been exhausted,
545
+ ensuring that the flow execution can continue.
876
546
 
877
547
 
878
548
  Parameters
879
549
  ----------
880
- sources : List[Union[str, Dict[str, Any]]], default: []
881
- List of secret specs, defining how the secrets are to be retrieved
882
- role : str, optional, default: None
883
- Role to use for fetching secrets
550
+ times : int, default 3
551
+ Number of times to retry this task.
552
+ minutes_between_retries : int, default 2
553
+ Number of minutes between retries.
884
554
  """
885
555
  ...
886
556
 
887
557
  @typing.overload
888
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
558
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
889
559
  """
890
- Specifies the PyPI packages for all steps of the flow.
560
+ Specifies that the step will success under all circumstances.
561
+
562
+ The decorator will create an optional artifact, specified by `var`, which
563
+ contains the exception raised. You can use it to detect the presence
564
+ of errors, indicating that all happy-path artifacts produced by the step
565
+ are missing.
891
566
 
892
- Use `@pypi_base` to set common packages required by all
893
- steps and use `@pypi` to specify step-specific overrides.
894
567
 
895
568
  Parameters
896
569
  ----------
897
- packages : Dict[str, str], default: {}
898
- Packages to use for this flow. The key is the name of the package
899
- and the value is the version to use.
900
- python : str, optional, default: None
901
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
902
- that the version used will correspond to the version of the Python interpreter used to start the run.
570
+ var : str, optional, default None
571
+ Name of the artifact in which to store the caught exception.
572
+ If not specified, the exception is not stored.
573
+ print_exception : bool, default True
574
+ Determines whether or not the exception is printed to
575
+ stdout when caught.
903
576
  """
904
577
  ...
905
578
 
906
579
  @typing.overload
907
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
580
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
908
581
  ...
909
582
 
910
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
911
- """
912
- Specifies the PyPI packages for all steps of the flow.
583
+ @typing.overload
584
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
585
+ ...
586
+
587
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
588
+ """
589
+ Specifies that the step will success under all circumstances.
590
+
591
+ The decorator will create an optional artifact, specified by `var`, which
592
+ contains the exception raised. You can use it to detect the presence
593
+ of errors, indicating that all happy-path artifacts produced by the step
594
+ are missing.
913
595
 
914
- Use `@pypi_base` to set common packages required by all
915
- steps and use `@pypi` to specify step-specific overrides.
916
596
 
917
597
  Parameters
918
598
  ----------
919
- packages : Dict[str, str], default: {}
920
- Packages to use for this flow. The key is the name of the package
599
+ var : str, optional, default None
600
+ Name of the artifact in which to store the caught exception.
601
+ If not specified, the exception is not stored.
602
+ print_exception : bool, default True
603
+ Determines whether or not the exception is printed to
604
+ stdout when caught.
605
+ """
606
+ ...
607
+
608
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
609
+ """
610
+ Specifies that this step should execute on Kubernetes.
611
+
612
+
613
+ Parameters
614
+ ----------
615
+ cpu : int, default 1
616
+ Number of CPUs required for this step. If `@resources` is
617
+ also present, the maximum value from all decorators is used.
618
+ memory : int, default 4096
619
+ Memory size (in MB) required for this step. If
620
+ `@resources` is also present, the maximum value from all decorators is
621
+ used.
622
+ disk : int, default 10240
623
+ Disk size (in MB) required for this step. If
624
+ `@resources` is also present, the maximum value from all decorators is
625
+ used.
626
+ image : str, optional, default None
627
+ Docker image to use when launching on Kubernetes. If not specified, and
628
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
629
+ not, a default Docker image mapping to the current version of Python is used.
630
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
631
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
632
+ image_pull_secrets: List[str], default []
633
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
634
+ Kubernetes image pull secrets to use when pulling container images
635
+ in Kubernetes.
636
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
637
+ Kubernetes service account to use when launching pod in Kubernetes.
638
+ secrets : List[str], optional, default None
639
+ Kubernetes secrets to use when launching pod in Kubernetes. These
640
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
641
+ in Metaflow configuration.
642
+ node_selector: Union[Dict[str,str], str], optional, default None
643
+ Kubernetes node selector(s) to apply to the pod running the task.
644
+ Can be passed in as a comma separated string of values e.g.
645
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
646
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
647
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
648
+ Kubernetes namespace to use when launching pod in Kubernetes.
649
+ gpu : int, optional, default None
650
+ Number of GPUs required for this step. A value of zero implies that
651
+ the scheduled node should not have GPUs.
652
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
653
+ The vendor of the GPUs to be used for this step.
654
+ tolerations : List[str], default []
655
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
656
+ Kubernetes tolerations to use when launching pod in Kubernetes.
657
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
658
+ Kubernetes labels to use when launching pod in Kubernetes.
659
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
660
+ Kubernetes annotations to use when launching pod in Kubernetes.
661
+ use_tmpfs : bool, default False
662
+ This enables an explicit tmpfs mount for this step.
663
+ tmpfs_tempdir : bool, default True
664
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
665
+ tmpfs_size : int, optional, default: None
666
+ The value for the size (in MiB) of the tmpfs mount for this step.
667
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
668
+ memory allocated for this step.
669
+ tmpfs_path : str, optional, default /metaflow_temp
670
+ Path to tmpfs mount for this step.
671
+ persistent_volume_claims : Dict[str, str], optional, default None
672
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
673
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
674
+ shared_memory: int, optional
675
+ Shared memory size (in MiB) required for this step
676
+ port: int, optional
677
+ Port number to specify in the Kubernetes job object
678
+ compute_pool : str, optional, default None
679
+ Compute pool to be used for for this step.
680
+ If not specified, any accessible compute pool within the perimeter is used.
681
+ hostname_resolution_timeout: int, default 10 * 60
682
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
683
+ Only applicable when @parallel is used.
684
+ qos: str, default: Burstable
685
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
686
+
687
+ security_context: Dict[str, Any], optional, default None
688
+ Container security context. Applies to the task container. Allows the following keys:
689
+ - privileged: bool, optional, default None
690
+ - allow_privilege_escalation: bool, optional, default None
691
+ - run_as_user: int, optional, default None
692
+ - run_as_group: int, optional, default None
693
+ - run_as_non_root: bool, optional, default None
694
+ """
695
+ ...
696
+
697
+ @typing.overload
698
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
699
+ """
700
+ Specifies secrets to be retrieved and injected as environment variables prior to
701
+ the execution of a step.
702
+
703
+
704
+ Parameters
705
+ ----------
706
+ sources : List[Union[str, Dict[str, Any]]], default: []
707
+ List of secret specs, defining how the secrets are to be retrieved
708
+ role : str, optional, default: None
709
+ Role to use for fetching secrets
710
+ """
711
+ ...
712
+
713
+ @typing.overload
714
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
715
+ ...
716
+
717
+ @typing.overload
718
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
719
+ ...
720
+
721
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
722
+ """
723
+ Specifies secrets to be retrieved and injected as environment variables prior to
724
+ the execution of a step.
725
+
726
+
727
+ Parameters
728
+ ----------
729
+ sources : List[Union[str, Dict[str, Any]]], default: []
730
+ List of secret specs, defining how the secrets are to be retrieved
731
+ role : str, optional, default: None
732
+ Role to use for fetching secrets
733
+ """
734
+ ...
735
+
736
+ @typing.overload
737
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
738
+ """
739
+ Specifies the Conda environment for the step.
740
+
741
+ Information in this decorator will augment any
742
+ attributes set in the `@conda_base` flow-level decorator. Hence,
743
+ you can use `@conda_base` to set packages required by all
744
+ steps and use `@conda` to specify step-specific overrides.
745
+
746
+
747
+ Parameters
748
+ ----------
749
+ packages : Dict[str, str], default {}
750
+ Packages to use for this step. The key is the name of the package
921
751
  and the value is the version to use.
922
- python : str, optional, default: None
752
+ libraries : Dict[str, str], default {}
753
+ Supported for backward compatibility. When used with packages, packages will take precedence.
754
+ python : str, optional, default None
755
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
756
+ that the version used will correspond to the version of the Python interpreter used to start the run.
757
+ disabled : bool, default False
758
+ If set to True, disables @conda.
759
+ """
760
+ ...
761
+
762
+ @typing.overload
763
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
764
+ ...
765
+
766
+ @typing.overload
767
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
768
+ ...
769
+
770
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
771
+ """
772
+ Specifies the Conda environment for the step.
773
+
774
+ Information in this decorator will augment any
775
+ attributes set in the `@conda_base` flow-level decorator. Hence,
776
+ you can use `@conda_base` to set packages required by all
777
+ steps and use `@conda` to specify step-specific overrides.
778
+
779
+
780
+ Parameters
781
+ ----------
782
+ packages : Dict[str, str], default {}
783
+ Packages to use for this step. The key is the name of the package
784
+ and the value is the version to use.
785
+ libraries : Dict[str, str], default {}
786
+ Supported for backward compatibility. When used with packages, packages will take precedence.
787
+ python : str, optional, default None
923
788
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
924
789
  that the version used will correspond to the version of the Python interpreter used to start the run.
790
+ disabled : bool, default False
791
+ If set to True, disables @conda.
792
+ """
793
+ ...
794
+
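To make the relationship between `@conda` and `@conda_base` described above concrete, a sketch follows; it is illustrative only. The Python and pandas versions are placeholders, and the flow assumes Conda environments are enabled (for example, running with `--environment=conda`).

```
# Hypothetical sketch: flow-wide interpreter via @conda_base, with a
# step-specific package override via @conda.
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python='3.10.4')                # applies to every step
class CondaFlow(FlowSpec):

    @conda(packages={'pandas': '2.0.3'})    # only this step needs pandas
    @step
    def start(self):
        import pandas as pd
        print('pandas', pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CondaFlow()
```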
795
+ @typing.overload
796
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
797
+ """
798
+ Specifies a timeout for your step.
799
+
800
+ This decorator is useful if this step may hang indefinitely.
801
+
802
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
803
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
804
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
805
+
806
+ Note that all the values specified in parameters are added together so if you specify
807
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
808
+
809
+
810
+ Parameters
811
+ ----------
812
+ seconds : int, default 0
813
+ Number of seconds to wait prior to timing out.
814
+ minutes : int, default 0
815
+ Number of minutes to wait prior to timing out.
816
+ hours : int, default 0
817
+ Number of hours to wait prior to timing out.
818
+ """
819
+ ...
820
+
821
+ @typing.overload
822
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
823
+ ...
824
+
825
+ @typing.overload
826
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
827
+ ...
828
+
829
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
830
+ """
831
+ Specifies a timeout for your step.
832
+
833
+ This decorator is useful if this step may hang indefinitely.
834
+
835
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
836
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
837
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
838
+
839
+ Note that all the values specified in parameters are added together so if you specify
840
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
841
+
842
+
843
+ Parameters
844
+ ----------
845
+ seconds : int, default 0
846
+ Number of seconds to wait prior to timing out.
847
+ minutes : int, default 0
848
+ Number of minutes to wait prior to timing out.
849
+ hours : int, default 0
850
+ Number of hours to wait prior to timing out.
851
+ """
852
+ ...
853
+
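The note above about values being added together is easy to see in a sketch (illustrative, not part of the diff): combining `hours=1` and `minutes=30` yields an effective timeout of 90 minutes, and pairing it with `@retry` gives the retry-on-timeout behaviour the docstring describes.

```
# Hypothetical sketch: a step that times out after 1 h 30 min and is
# retried up to two times if it does.
from metaflow import FlowSpec, retry, step, timeout

class TimeoutFlow(FlowSpec):

    @retry(times=2)
    @timeout(hours=1, minutes=30)   # 1 h + 30 min = 90 min effective timeout
    @step
    def start(self):
        self.result = 'done'        # long-running work would go here
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TimeoutFlow()
```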
854
+ @typing.overload
855
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
856
+ """
857
+ Specifies environment variables to be set prior to the execution of a step.
858
+
859
+
860
+ Parameters
861
+ ----------
862
+ vars : Dict[str, str], default {}
863
+ Dictionary of environment variables to set.
864
+ """
865
+ ...
866
+
867
+ @typing.overload
868
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
869
+ ...
870
+
871
+ @typing.overload
872
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
873
+ ...
874
+
875
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
876
+ """
877
+ Specifies environment variables to be set prior to the execution of a step.
878
+
879
+
880
+ Parameters
881
+ ----------
882
+ vars : Dict[str, str], default {}
883
+ Dictionary of environment variables to set.
925
884
  """
926
885
  ...
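A brief sketch of `@environment` usage (illustrative, not part of the diff; the variable name is arbitrary):

```
# Hypothetical sketch: set an environment variable before the step runs.
import os
from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):

    @environment(vars={'TOKENIZERS_PARALLELISM': 'false'})   # placeholder variable
    @step
    def start(self):
        print(os.environ['TOKENIZERS_PARALLELISM'])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EnvFlow()
```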
927
886
 
@@ -968,38 +927,104 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
968
927
  """
969
928
  ...
970
929
 
971
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
930
+ @typing.overload
931
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
972
932
  """
973
- Specifies what flows belong to the same project.
933
+ Specifies the flow(s) that this flow depends on.
934
+
935
+ ```
936
+ @trigger_on_finish(flow='FooFlow')
937
+ ```
938
+ or
939
+ ```
940
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
941
+ ```
942
+ This decorator respects the @project decorator and triggers the flow
943
+ when upstream runs within the same namespace complete successfully.
944
+
945
+ Additionally, you can specify project-aware upstream flow dependencies
946
+ by specifying the fully qualified project_flow_name.
947
+ ```
948
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
949
+ ```
950
+ or
951
+ ```
952
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
953
+ ```
954
+
955
+ You can also specify just the project or project branch (other values will be
956
+ inferred from the current project or project branch):
957
+ ```
958
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
959
+ ```
960
+
961
+ Note that `branch` is typically one of:
962
+ - `prod`
963
+ - `user.bob`
964
+ - `test.my_experiment`
965
+ - `prod.staging`
966
+
967
+
968
+ Parameters
969
+ ----------
970
+ flow : Union[str, Dict[str, str]], optional, default None
971
+ Upstream flow dependency for this flow.
972
+ flows : List[Union[str, Dict[str, str]]], default []
973
+ Upstream flow dependencies for this flow.
974
+ options : Dict[str, Any], default {}
975
+ Backend-specific configuration for tuning eventing behavior.
976
+ """
977
+ ...
978
+
979
+ @typing.overload
980
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
981
+ ...
982
+
983
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
984
+ """
985
+ Specifies the flow(s) that this flow depends on.
986
+
987
+ ```
988
+ @trigger_on_finish(flow='FooFlow')
989
+ ```
990
+ or
991
+ ```
992
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
993
+ ```
994
+ This decorator respects the @project decorator and triggers the flow
995
+ when upstream runs within the same namespace complete successfully
974
996
 
975
- A project-specific namespace is created for all flows that
976
- use the same `@project(name)`.
997
+ Additionally, you can specify project-aware upstream flow dependencies
998
+ by specifying the fully qualified project_flow_name.
999
+ ```
1000
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1001
+ ```
1002
+ or
1003
+ ```
1004
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1005
+ ```
977
1006
 
1007
+ You can also specify just the project or project branch (other values will be
1008
+ inferred from the current project or project branch):
1009
+ ```
1010
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1011
+ ```
978
1012
 
979
- Parameters
980
- ----------
981
- name : str
982
- Project name. Make sure that the name is unique amongst all
983
- projects that use the same production scheduler. The name may
984
- contain only lowercase alphanumeric characters and underscores.
1013
+ Note that `branch` is typically one of:
1014
+ - `prod`
1015
+ - `user.bob`
1016
+ - `test.my_experiment`
1017
+ - `prod.staging`
985
1018
 
986
- branch : Optional[str], default None
987
- The branch to use. If not specified, the branch is set to
988
- `user.<username>` unless `production` is set to `True`. This can
989
- also be set on the command line using `--branch` as a top-level option.
990
- It is an error to specify `branch` in the decorator and on the command line.
991
1019
 
992
- production : bool, default False
993
- Whether or not the branch is the production branch. This can also be set on the
994
- command line using `--production` as a top-level option. It is an error to specify
995
- `production` in the decorator and on the command line.
996
- The project branch name will be:
997
- - if `branch` is specified:
998
- - if `production` is True: `prod.<branch>`
999
- - if `production` is False: `test.<branch>`
1000
- - if `branch` is not specified:
1001
- - if `production` is True: `prod`
1002
- - if `production` is False: `user.<username>`
1020
+ Parameters
1021
+ ----------
1022
+ flow : Union[str, Dict[str, str]], optional, default None
1023
+ Upstream flow dependency for this flow.
1024
+ flows : List[Union[str, Dict[str, str]]], default []
1025
+ Upstream flow dependencies for this flow.
1026
+ options : Dict[str, Any], default {}
1027
+ Backend-specific configuration for tuning eventing behavior.
1003
1028
  """
1004
1029
  ...
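The docstring above already contains fragments such as `@trigger_on_finish(flow='FooFlow')`; a complete flow-level sketch is shown below for context (not part of the diff). `FooFlow` is a placeholder upstream flow, and the trigger only takes effect once the flow is deployed to a production orchestrator such as Argo Workflows.

```
# Hypothetical sketch: run this flow whenever a run of FooFlow finishes
# successfully in the same project/namespace.
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow='FooFlow')          # placeholder upstream flow
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```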
1005
1030
 
@@ -1054,46 +1079,38 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1054
1079
  """
1055
1080
  ...
1056
1081
 
1057
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1082
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1058
1083
  """
1059
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1060
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1061
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1062
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1063
- starts only after all sensors finish.
1084
+ Specifies what flows belong to the same project.
1085
+
1086
+ A project-specific namespace is created for all flows that
1087
+ use the same `@project(name)`.
1064
1088
 
1065
1089
 
1066
1090
  Parameters
1067
1091
  ----------
1068
- timeout : int
1069
- Time, in seconds before the task times out and fails. (Default: 3600)
1070
- poke_interval : int
1071
- Time in seconds that the job should wait in between each try. (Default: 60)
1072
- mode : str
1073
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1074
- exponential_backoff : bool
1075
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1076
- pool : str
1077
- the slot pool this task should run in,
1078
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1079
- soft_fail : bool
1080
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1081
1092
  name : str
1082
- Name of the sensor on Airflow
1083
- description : str
1084
- Description of sensor in the Airflow UI
1085
- bucket_key : Union[str, List[str]]
1086
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1087
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1088
- bucket_name : str
1089
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1090
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1091
- wildcard_match : bool
1092
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1093
- aws_conn_id : str
1094
- a reference to the s3 connection on Airflow. (Default: None)
1095
- verify : bool
1096
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1093
+ Project name. Make sure that the name is unique amongst all
1094
+ projects that use the same production scheduler. The name may
1095
+ contain only lowercase alphanumeric characters and underscores.
1096
+
1097
+ branch : Optional[str], default None
1098
+ The branch to use. If not specified, the branch is set to
1099
+ `user.<username>` unless `production` is set to `True`. This can
1100
+ also be set on the command line using `--branch` as a top-level option.
1101
+ It is an error to specify `branch` in the decorator and on the command line.
1102
+
1103
+ production : bool, default False
1104
+ Whether or not the branch is the production branch. This can also be set on the
1105
+ command line using `--production` as a top-level option. It is an error to specify
1106
+ `production` in the decorator and on the command line.
1107
+ The project branch name will be:
1108
+ - if `branch` is specified:
1109
+ - if `production` is True: `prod.<branch>`
1110
+ - if `production` is False: `test.<branch>`
1111
+ - if `branch` is not specified:
1112
+ - if `production` is True: `prod`
1113
+ - if `production` is False: `user.<username>`
1097
1114
  """
1098
1115
  ...
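As an aside (not part of the diff), the branch-naming rules listed above can be read off a small sketch; the project name is a placeholder.

```
# Hypothetical sketch: group related flows under one project namespace.
from metaflow import FlowSpec, project, step

@project(name='fraud_detection')    # placeholder project name
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ScoringFlow()
```

Per the docstring, running this flow with `--branch my_experiment` would place it under `test.my_experiment`, while `--production` alone maps to `prod`.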
1099
1116
 
@@ -1191,103 +1208,43 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1191
1208
  ...
1192
1209
 
1193
1210
  @typing.overload
1194
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1211
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1195
1212
  """
1196
- Specifies the flow(s) that this flow depends on.
1197
-
1198
- ```
1199
- @trigger_on_finish(flow='FooFlow')
1200
- ```
1201
- or
1202
- ```
1203
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1204
- ```
1205
- This decorator respects the @project decorator and triggers the flow
1206
- when upstream runs within the same namespace complete successfully
1207
-
1208
- Additionally, you can specify project aware upstream flow dependencies
1209
- by specifying the fully qualified project_flow_name.
1210
- ```
1211
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1212
- ```
1213
- or
1214
- ```
1215
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1216
- ```
1217
-
1218
- You can also specify just the project or project branch (other values will be
1219
- inferred from the current project or project branch):
1220
- ```
1221
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1222
- ```
1223
-
1224
- Note that `branch` is typically one of:
1225
- - `prod`
1226
- - `user.bob`
1227
- - `test.my_experiment`
1228
- - `prod.staging`
1213
+ Specifies the PyPI packages for all steps of the flow.
1229
1214
 
1215
+ Use `@pypi_base` to set common packages required by all
1216
+ steps and use `@pypi` to specify step-specific overrides.
1230
1217
 
1231
1218
  Parameters
1232
1219
  ----------
1233
- flow : Union[str, Dict[str, str]], optional, default None
1234
- Upstream flow dependency for this flow.
1235
- flows : List[Union[str, Dict[str, str]]], default []
1236
- Upstream flow dependencies for this flow.
1237
- options : Dict[str, Any], default {}
1238
- Backend-specific configuration for tuning eventing behavior.
1220
+ packages : Dict[str, str], default: {}
1221
+ Packages to use for this flow. The key is the name of the package
1222
+ and the value is the version to use.
1223
+ python : str, optional, default: None
1224
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1225
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1239
1226
  """
1240
1227
  ...
1241
1228
 
1242
1229
  @typing.overload
1243
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1230
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1244
1231
  ...
1245
1232
 
1246
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1233
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1247
1234
  """
1248
- Specifies the flow(s) that this flow depends on.
1249
-
1250
- ```
1251
- @trigger_on_finish(flow='FooFlow')
1252
- ```
1253
- or
1254
- ```
1255
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1256
- ```
1257
- This decorator respects the @project decorator and triggers the flow
1258
- when upstream runs within the same namespace complete successfully
1259
-
1260
- Additionally, you can specify project aware upstream flow dependencies
1261
- by specifying the fully qualified project_flow_name.
1262
- ```
1263
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1264
- ```
1265
- or
1266
- ```
1267
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1268
- ```
1269
-
1270
- You can also specify just the project or project branch (other values will be
1271
- inferred from the current project or project branch):
1272
- ```
1273
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1274
- ```
1275
-
1276
- Note that `branch` is typically one of:
1277
- - `prod`
1278
- - `user.bob`
1279
- - `test.my_experiment`
1280
- - `prod.staging`
1235
+ Specifies the PyPI packages for all steps of the flow.
1281
1236
 
1237
+ Use `@pypi_base` to set common packages required by all
1238
+ steps and use `@pypi` to specify step-specific overrides.
1282
1239
 
1283
1240
  Parameters
1284
1241
  ----------
1285
- flow : Union[str, Dict[str, str]], optional, default None
1286
- Upstream flow dependency for this flow.
1287
- flows : List[Union[str, Dict[str, str]]], default []
1288
- Upstream flow dependencies for this flow.
1289
- options : Dict[str, Any], default {}
1290
- Backend-specific configuration for tuning eventing behavior.
1242
+ packages : Dict[str, str], default: {}
1243
+ Packages to use for this flow. The key is the name of the package
1244
+ and the value is the version to use.
1245
+ python : str, optional, default: None
1246
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1247
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1291
1248
  """
1292
1249
  ...
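A sketch of `@pypi_base`, mirroring the `@conda_base`/`@conda` split described earlier (illustrative only; the package pins are placeholders and the PyPI environment is assumed to be enabled, for example via `--environment=pypi`):

```
# Hypothetical sketch: pin flow-wide PyPI packages and a Python version.
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={'requests': '2.31.0'}, python='3.11.5')   # placeholder pins
class PypiFlow(FlowSpec):

    @step
    def start(self):
        import requests
        print('requests', requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiFlow()
```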
1293
1250
 
@@ -1342,3 +1299,46 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1342
1299
  """
1343
1300
  ...
1344
1301
 
1302
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1303
+ """
1304
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1305
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1306
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1307
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1308
+ starts only after all sensors finish.
1309
+
1310
+
1311
+ Parameters
1312
+ ----------
1313
+ timeout : int
1314
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1315
+ poke_interval : int
1316
+ Time in seconds that the job should wait in between each try. (Default: 60)
1317
+ mode : str
1318
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1319
+ exponential_backoff : bool
1320
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1321
+ pool : str
1322
+ The slot pool this task should run in;
1323
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1324
+ soft_fail : bool
1325
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1326
+ name : str
1327
+ Name of the sensor on Airflow
1328
+ description : str
1329
+ Description of sensor in the Airflow UI
1330
+ bucket_key : Union[str, List[str]]
1331
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1332
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1333
+ bucket_name : str
1334
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1335
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1336
+ wildcard_match : bool
1337
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1338
+ aws_conn_id : str
1339
+ A reference to the S3 connection on Airflow. (Default: None)
1340
+ verify : bool
1341
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1342
+ """
1343
+ ...
1344
+
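Finally, a sketch of the sensor decorator above (illustrative, not part of the diff). The bucket, key, and sensor name are placeholders, and the example assumes that arguments omitted here fall back to the defaults listed in the docstring, as they do in the underlying Airflow operator; the decorator only has an effect when the flow is compiled with `airflow create`.

```
# Hypothetical sketch: gate the start step on an S3 object existing.
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    name='wait_for_daily_input',                         # placeholder sensor name
    description='Block start until the input file lands',
    bucket_key='s3://example-bucket/data/input.csv',     # placeholder full s3:// url
    timeout=3600,
    poke_interval=60,
)
class SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SensorFlow()
```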