metaflow-stubs 2.18.13__py2.py3-none-any.whl → 2.19.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs has been flagged as potentially problematic; see the package registry's advisory page for more details.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +651 -650
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +3 -2
  7. metaflow-stubs/client/core.pyi +63 -32
  8. metaflow-stubs/client/filecache.pyi +20 -4
  9. metaflow-stubs/events.pyi +7 -8
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +7 -8
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +16 -2
  20. metaflow-stubs/metaflow_current.pyi +14 -16
  21. metaflow-stubs/metaflow_git.pyi +3 -5
  22. metaflow-stubs/multicore_utils.pyi +4 -4
  23. metaflow-stubs/packaging_sys/__init__.pyi +39 -40
  24. metaflow-stubs/packaging_sys/backend.pyi +13 -13
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +6 -7
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +13 -13
  27. metaflow-stubs/packaging_sys/utils.pyi +3 -6
  28. metaflow-stubs/packaging_sys/v1.pyi +5 -5
  29. metaflow-stubs/parameters.pyi +6 -9
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +11 -9
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +4 -7
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +6 -4
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +9 -7
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +5 -6
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +4 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +14 -14
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +5 -5
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +27 -35
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +3 -5
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +23 -22
  143. metaflow-stubs/runner/deployer_impl.pyi +6 -4
  144. metaflow-stubs/runner/metaflow_runner.pyi +134 -21
  145. metaflow-stubs/runner/nbdeploy.pyi +3 -3
  146. metaflow-stubs/runner/nbrun.pyi +3 -3
  147. metaflow-stubs/runner/subprocess_manager.pyi +8 -8
  148. metaflow-stubs/runner/utils.pyi +5 -6
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +4 -5
  151. metaflow-stubs/system/system_monitor.pyi +3 -3
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +5 -6
  156. metaflow-stubs/user_configs/config_parameters.pyi +8 -8
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +6 -6
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +8 -9
  160. metaflow-stubs/user_decorators/mutable_step.pyi +6 -9
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +7 -8
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +24 -17
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.1.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.1.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.18.13.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.1.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.18.13.dist-info → metaflow_stubs-2.19.1.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.13 #
4
- # Generated on 2025-10-20T17:35:52.643612 #
3
+ # MF version: 2.19.1 #
4
+ # Generated on 2025-10-28T01:39:06.721172 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -40,17 +40,17 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
42
  from . import metaflow_git as metaflow_git
43
- from . import events as events
44
43
  from . import tuple_util as tuple_util
44
+ from . import events as events
45
45
  from . import runner as runner
46
46
  from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
51
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
52
50
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
53
51
  from .plugins.parsers import yaml_parser as yaml_parser
52
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
54
54
  from . import cards as cards
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
@@ -59,6 +59,7 @@ from .client.core import default_namespace as default_namespace
59
59
  from .client.core import metadata as metadata
60
60
  from .client.core import get_metadata as get_metadata
61
61
  from .client.core import default_metadata as default_metadata
62
+ from .client.core import inspect_spin as inspect_spin
62
63
  from .client.core import Metaflow as Metaflow
63
64
  from .client.core import Flow as Flow
64
65
  from .client.core import Run as Run
@@ -71,8 +72,8 @@ from .runner.deployer import Deployer as Deployer
71
72
  from .runner.deployer import DeployedFlow as DeployedFlow
72
73
  from .runner.nbdeploy import NBDeployer as NBDeployer
73
74
  from . import version as version
74
- from . import system as system
75
75
  from . import cli_components as cli_components
76
+ from . import system as system
76
77
  from . import pylint_wrapper as pylint_wrapper
77
78
  from . import cli as cli
78
79
 
@@ -119,7 +120,7 @@ def step(f: typing.Callable[[FlowSpecDerived], None]) -> typing.Callable[[FlowSp
119
120
  def step(f: typing.Callable[[FlowSpecDerived, typing.Any], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
120
121
  ...
121
122
 
122
- def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callable[[FlowSpecDerived, typing.Any], None]]):
123
+ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any], NoneType]):
123
124
  """
124
125
  Marks a method in a FlowSpec as a Metaflow Step. Note that this
125
126
  decorator needs to be placed as close to the method as possible (ie:
@@ -154,124 +155,141 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
154
155
  ...
155
156
 
156
157
  @typing.overload
157
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
158
- """
159
- Decorator prototype for all step decorators. This function gets specialized
160
- and imported for all decorators types by _import_plugin_decorators().
161
- """
162
- ...
163
-
164
- @typing.overload
165
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
166
- ...
167
-
168
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
169
- """
170
- Decorator prototype for all step decorators. This function gets specialized
171
- and imported for all decorators types by _import_plugin_decorators().
172
- """
173
- ...
174
-
175
- @typing.overload
176
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
158
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
177
159
  """
178
- Specifies the Conda environment for the step.
160
+ Specifies the number of times the task corresponding
161
+ to a step needs to be retried.
179
162
 
180
- Information in this decorator will augment any
181
- attributes set in the `@conda_base` flow-level decorator. Hence,
182
- you can use `@conda_base` to set packages required by all
183
- steps and use `@conda` to specify step-specific overrides.
163
+ This decorator is useful for handling transient errors, such as networking issues.
164
+ If your task contains operations that can't be retried safely, e.g. database updates,
165
+ it is advisable to annotate it with `@retry(times=0)`.
166
+
167
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
168
+ decorator will execute a no-op task after all retries have been exhausted,
169
+ ensuring that the flow execution can continue.
184
170
 
185
171
 
186
172
  Parameters
187
173
  ----------
188
- packages : Dict[str, str], default {}
189
- Packages to use for this step. The key is the name of the package
190
- and the value is the version to use.
191
- libraries : Dict[str, str], default {}
192
- Supported for backward compatibility. When used with packages, packages will take precedence.
193
- python : str, optional, default None
194
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
195
- that the version used will correspond to the version of the Python interpreter used to start the run.
196
- disabled : bool, default False
197
- If set to True, disables @conda.
174
+ times : int, default 3
175
+ Number of times to retry this task.
176
+ minutes_between_retries : int, default 2
177
+ Number of minutes between retries.
198
178
  """
199
179
  ...
200
180
 
201
181
  @typing.overload
202
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
182
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
203
183
  ...
204
184
 
205
185
  @typing.overload
206
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
186
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
207
187
  ...
208
188
 
209
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
189
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
210
190
  """
211
- Specifies the Conda environment for the step.
191
+ Specifies the number of times the task corresponding
192
+ to a step needs to be retried.
212
193
 
213
- Information in this decorator will augment any
214
- attributes set in the `@conda_base` flow-level decorator. Hence,
215
- you can use `@conda_base` to set packages required by all
216
- steps and use `@conda` to specify step-specific overrides.
194
+ This decorator is useful for handling transient errors, such as networking issues.
195
+ If your task contains operations that can't be retried safely, e.g. database updates,
196
+ it is advisable to annotate it with `@retry(times=0)`.
197
+
198
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
199
+ decorator will execute a no-op task after all retries have been exhausted,
200
+ ensuring that the flow execution can continue.
217
201
 
218
202
 
219
203
  Parameters
220
204
  ----------
221
- packages : Dict[str, str], default {}
222
- Packages to use for this step. The key is the name of the package
223
- and the value is the version to use.
224
- libraries : Dict[str, str], default {}
225
- Supported for backward compatibility. When used with packages, packages will take precedence.
226
- python : str, optional, default None
227
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
228
- that the version used will correspond to the version of the Python interpreter used to start the run.
229
- disabled : bool, default False
230
- If set to True, disables @conda.
205
+ times : int, default 3
206
+ Number of times to retry this task.
207
+ minutes_between_retries : int, default 2
208
+ Number of minutes between retries.
231
209
  """
232
210
  ...
233
211
 
234
212
  @typing.overload
235
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
213
+ def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
236
214
  """
237
- Specifies secrets to be retrieved and injected as environment variables prior to
238
- the execution of a step.
215
+ Specifies the resources needed when executing this step.
216
+
217
+ Use `@resources` to specify the resource requirements
218
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
219
+
220
+ You can choose the compute layer on the command line by executing e.g.
221
+ ```
222
+ python myflow.py run --with batch
223
+ ```
224
+ or
225
+ ```
226
+ python myflow.py run --with kubernetes
227
+ ```
228
+ which executes the flow on the desired system using the
229
+ requirements specified in `@resources`.
239
230
 
240
231
 
241
232
  Parameters
242
233
  ----------
243
- sources : List[Union[str, Dict[str, Any]]], default: []
244
- List of secret specs, defining how the secrets are to be retrieved
245
- role : str, optional, default: None
246
- Role to use for fetching secrets
234
+ cpu : int, default 1
235
+ Number of CPUs required for this step.
236
+ gpu : int, optional, default None
237
+ Number of GPUs required for this step.
238
+ disk : int, optional, default None
239
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
240
+ memory : int, default 4096
241
+ Memory size (in MB) required for this step.
242
+ shared_memory : int, optional, default None
243
+ The value for the size (in MiB) of the /dev/shm volume for this step.
244
+ This parameter maps to the `--shm-size` option in Docker.
247
245
  """
248
246
  ...
249
247
 
250
248
  @typing.overload
251
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
249
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
252
250
  ...
253
251
 
254
252
  @typing.overload
255
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
253
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
256
254
  ...
257
255
 
258
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
256
+ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
259
257
  """
260
- Specifies secrets to be retrieved and injected as environment variables prior to
261
- the execution of a step.
258
+ Specifies the resources needed when executing this step.
259
+
260
+ Use `@resources` to specify the resource requirements
261
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
262
+
263
+ You can choose the compute layer on the command line by executing e.g.
264
+ ```
265
+ python myflow.py run --with batch
266
+ ```
267
+ or
268
+ ```
269
+ python myflow.py run --with kubernetes
270
+ ```
271
+ which executes the flow on the desired system using the
272
+ requirements specified in `@resources`.
262
273
 
263
274
 
264
275
  Parameters
265
276
  ----------
266
- sources : List[Union[str, Dict[str, Any]]], default: []
267
- List of secret specs, defining how the secrets are to be retrieved
268
- role : str, optional, default: None
269
- Role to use for fetching secrets
277
+ cpu : int, default 1
278
+ Number of CPUs required for this step.
279
+ gpu : int, optional, default None
280
+ Number of GPUs required for this step.
281
+ disk : int, optional, default None
282
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
283
+ memory : int, default 4096
284
+ Memory size (in MB) required for this step.
285
+ shared_memory : int, optional, default None
286
+ The value for the size (in MiB) of the /dev/shm volume for this step.
287
+ This parameter maps to the `--shm-size` option in Docker.
270
288
  """
271
289
  ...
272
290
 
273
291
  @typing.overload
274
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
292
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
275
293
  """
276
294
  Specifies the PyPI packages for the step.
277
295
 
@@ -300,7 +318,7 @@ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callab
300
318
  def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
301
319
  ...
302
320
 
303
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
321
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
304
322
  """
305
323
  Specifies the PyPI packages for the step.
306
324
 
@@ -321,9 +339,10 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
321
339
  """
322
340
  ...
323
341
 
324
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
342
+ @typing.overload
343
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
325
344
  """
326
- Specifies that this step should execute on Kubernetes.
345
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
327
346
 
328
347
 
329
348
  Parameters
@@ -331,120 +350,299 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
331
350
  cpu : int, default 1
332
351
  Number of CPUs required for this step. If `@resources` is
333
352
  also present, the maximum value from all decorators is used.
353
+ gpu : int, default 0
354
+ Number of GPUs required for this step. If `@resources` is
355
+ also present, the maximum value from all decorators is used.
334
356
  memory : int, default 4096
335
357
  Memory size (in MB) required for this step. If
336
358
  `@resources` is also present, the maximum value from all decorators is
337
359
  used.
338
- disk : int, default 10240
339
- Disk size (in MB) required for this step. If
340
- `@resources` is also present, the maximum value from all decorators is
341
- used.
342
360
  image : str, optional, default None
343
- Docker image to use when launching on Kubernetes. If not specified, and
344
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
361
+ Docker image to use when launching on AWS Batch. If not specified, and
362
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
345
363
  not, a default Docker image mapping to the current version of Python is used.
346
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
347
- If given, the imagePullPolicy to be applied to the Docker image of the step.
348
- image_pull_secrets: List[str], default []
349
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
350
- Kubernetes image pull secrets to use when pulling container images
351
- in Kubernetes.
352
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
353
- Kubernetes service account to use when launching pod in Kubernetes.
354
- secrets : List[str], optional, default None
355
- Kubernetes secrets to use when launching pod in Kubernetes. These
356
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
357
- in Metaflow configuration.
358
- node_selector: Union[Dict[str,str], str], optional, default None
359
- Kubernetes node selector(s) to apply to the pod running the task.
360
- Can be passed in as a comma separated string of values e.g.
361
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
362
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
363
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
364
- Kubernetes namespace to use when launching pod in Kubernetes.
365
- gpu : int, optional, default None
366
- Number of GPUs required for this step. A value of zero implies that
367
- the scheduled node should not have GPUs.
368
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
369
- The vendor of the GPUs to be used for this step.
370
- tolerations : List[Dict[str,str]], default []
371
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
372
- Kubernetes tolerations to use when launching pod in Kubernetes.
373
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
374
- Kubernetes labels to use when launching pod in Kubernetes.
375
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
376
- Kubernetes annotations to use when launching pod in Kubernetes.
364
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
365
+ AWS Batch Job Queue to submit the job to.
366
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
367
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
368
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
369
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
370
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
371
+ shared_memory : int, optional, default None
372
+ The value for the size (in MiB) of the /dev/shm volume for this step.
373
+ This parameter maps to the `--shm-size` option in Docker.
374
+ max_swap : int, optional, default None
375
+ The total amount of swap memory (in MiB) a container can use for this
376
+ step. This parameter is translated to the `--memory-swap` option in
377
+ Docker where the value is the sum of the container memory plus the
378
+ `max_swap` value.
379
+ swappiness : int, optional, default None
380
+ This allows you to tune memory swappiness behavior for this step.
381
+ A swappiness value of 0 causes swapping not to happen unless absolutely
382
+ necessary. A swappiness value of 100 causes pages to be swapped very
383
+ aggressively. Accepted values are whole numbers between 0 and 100.
384
+ aws_batch_tags: Dict[str, str], optional, default None
385
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
386
+ Set as string key-value pairs.
377
387
  use_tmpfs : bool, default False
378
- This enables an explicit tmpfs mount for this step.
388
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
389
+ not available on Fargate compute environments
379
390
  tmpfs_tempdir : bool, default True
380
391
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
381
- tmpfs_size : int, optional, default: None
392
+ tmpfs_size : int, optional, default None
382
393
  The value for the size (in MiB) of the tmpfs mount for this step.
383
394
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
384
395
  memory allocated for this step.
385
- tmpfs_path : str, optional, default /metaflow_temp
386
- Path to tmpfs mount for this step.
387
- persistent_volume_claims : Dict[str, str], optional, default None
388
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
389
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
390
- shared_memory: int, optional
391
- Shared memory size (in MiB) required for this step
392
- port: int, optional
393
- Port number to specify in the Kubernetes job object
394
- compute_pool : str, optional, default None
395
- Compute pool to be used for for this step.
396
- If not specified, any accessible compute pool within the perimeter is used.
397
- hostname_resolution_timeout: int, default 10 * 60
398
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
399
- Only applicable when @parallel is used.
400
- qos: str, default: Burstable
401
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
402
-
403
- security_context: Dict[str, Any], optional, default None
404
- Container security context. Applies to the task container. Allows the following keys:
405
- - privileged: bool, optional, default None
406
- - allow_privilege_escalation: bool, optional, default None
407
- - run_as_user: int, optional, default None
408
- - run_as_group: int, optional, default None
409
- - run_as_non_root: bool, optional, default None
410
- """
411
- ...
396
+ tmpfs_path : str, optional, default None
397
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
398
+ inferentia : int, default 0
399
+ Number of Inferentia chips required for this step.
400
+ trainium : int, default None
401
+ Alias for inferentia. Use only one of the two.
402
+ efa : int, default 0
403
+ Number of elastic fabric adapter network devices to attach to container
404
+ ephemeral_storage : int, default None
405
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
406
+ This is only relevant for Fargate compute environments
407
+ log_driver: str, optional, default None
408
+ The log driver to use for the Amazon ECS container.
409
+ log_options: List[str], optional, default None
410
+ List of strings containing options for the chosen log driver. The configurable values
411
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
412
+ Example: [`awslogs-group:aws/batch/job`]
413
+ """
414
+ ...
415
+
416
+ @typing.overload
417
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
418
+ ...
412
419
 
413
420
  @typing.overload
414
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
421
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
422
+ ...
423
+
424
+ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None):
415
425
  """
416
- Specifies environment variables to be set prior to the execution of a step.
426
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
417
427
 
418
428
 
419
429
  Parameters
420
430
  ----------
421
- vars : Dict[str, str], default {}
422
- Dictionary of environment variables to set.
431
+ cpu : int, default 1
432
+ Number of CPUs required for this step. If `@resources` is
433
+ also present, the maximum value from all decorators is used.
434
+ gpu : int, default 0
435
+ Number of GPUs required for this step. If `@resources` is
436
+ also present, the maximum value from all decorators is used.
437
+ memory : int, default 4096
438
+ Memory size (in MB) required for this step. If
439
+ `@resources` is also present, the maximum value from all decorators is
440
+ used.
441
+ image : str, optional, default None
442
+ Docker image to use when launching on AWS Batch. If not specified, and
443
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
444
+ not, a default Docker image mapping to the current version of Python is used.
445
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
446
+ AWS Batch Job Queue to submit the job to.
447
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
448
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
449
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
450
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
451
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
452
+ shared_memory : int, optional, default None
453
+ The value for the size (in MiB) of the /dev/shm volume for this step.
454
+ This parameter maps to the `--shm-size` option in Docker.
455
+ max_swap : int, optional, default None
456
+ The total amount of swap memory (in MiB) a container can use for this
457
+ step. This parameter is translated to the `--memory-swap` option in
458
+ Docker where the value is the sum of the container memory plus the
459
+ `max_swap` value.
460
+ swappiness : int, optional, default None
461
+ This allows you to tune memory swappiness behavior for this step.
462
+ A swappiness value of 0 causes swapping not to happen unless absolutely
463
+ necessary. A swappiness value of 100 causes pages to be swapped very
464
+ aggressively. Accepted values are whole numbers between 0 and 100.
465
+ aws_batch_tags: Dict[str, str], optional, default None
466
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
467
+ Set as string key-value pairs.
468
+ use_tmpfs : bool, default False
469
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
470
+ not available on Fargate compute environments
471
+ tmpfs_tempdir : bool, default True
472
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
473
+ tmpfs_size : int, optional, default None
474
+ The value for the size (in MiB) of the tmpfs mount for this step.
475
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
476
+ memory allocated for this step.
477
+ tmpfs_path : str, optional, default None
478
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
479
+ inferentia : int, default 0
480
+ Number of Inferentia chips required for this step.
481
+ trainium : int, default None
482
+ Alias for inferentia. Use only one of the two.
483
+ efa : int, default 0
484
+ Number of elastic fabric adapter network devices to attach to container
485
+ ephemeral_storage : int, default None
486
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
487
+ This is only relevant for Fargate compute environments
488
+ log_driver: str, optional, default None
489
+ The log driver to use for the Amazon ECS container.
490
+ log_options: List[str], optional, default None
491
+ List of strings containing options for the chosen log driver. The configurable values
492
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
493
+ Example: [`awslogs-group:aws/batch/job`]
494
+ """
495
+ ...
496
+
497
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
498
+ """
499
+ Specifies that this step should execute on Kubernetes.
500
+
501
+
502
+ Parameters
503
+ ----------
504
+ cpu : int, default 1
505
+ Number of CPUs required for this step. If `@resources` is
506
+ also present, the maximum value from all decorators is used.
507
+ memory : int, default 4096
508
+ Memory size (in MB) required for this step. If
509
+ `@resources` is also present, the maximum value from all decorators is
510
+ used.
511
+ disk : int, default 10240
512
+ Disk size (in MB) required for this step. If
513
+ `@resources` is also present, the maximum value from all decorators is
514
+ used.
515
+ image : str, optional, default None
516
+ Docker image to use when launching on Kubernetes. If not specified, and
517
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
518
+ not, a default Docker image mapping to the current version of Python is used.
519
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
520
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
521
+ image_pull_secrets: List[str], default []
522
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
523
+ Kubernetes image pull secrets to use when pulling container images
524
+ in Kubernetes.
525
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
526
+ Kubernetes service account to use when launching pod in Kubernetes.
527
+ secrets : List[str], optional, default None
528
+ Kubernetes secrets to use when launching pod in Kubernetes. These
529
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
530
+ in Metaflow configuration.
531
+ node_selector: Union[Dict[str,str], str], optional, default None
532
+ Kubernetes node selector(s) to apply to the pod running the task.
533
+ Can be passed in as a comma separated string of values e.g.
534
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
535
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
536
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
537
+ Kubernetes namespace to use when launching pod in Kubernetes.
538
+ gpu : int, optional, default None
539
+ Number of GPUs required for this step. A value of zero implies that
540
+ the scheduled node should not have GPUs.
541
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
542
+ The vendor of the GPUs to be used for this step.
543
+ tolerations : List[Dict[str,str]], default []
544
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
545
+ Kubernetes tolerations to use when launching pod in Kubernetes.
546
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
547
+ Kubernetes labels to use when launching pod in Kubernetes.
548
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
549
+ Kubernetes annotations to use when launching pod in Kubernetes.
550
+ use_tmpfs : bool, default False
551
+ This enables an explicit tmpfs mount for this step.
552
+ tmpfs_tempdir : bool, default True
553
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
554
+ tmpfs_size : int, optional, default: None
555
+ The value for the size (in MiB) of the tmpfs mount for this step.
556
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
557
+ memory allocated for this step.
558
+ tmpfs_path : str, optional, default /metaflow_temp
559
+ Path to tmpfs mount for this step.
560
+ persistent_volume_claims : Dict[str, str], optional, default None
561
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
562
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
563
+ shared_memory: int, optional
564
+ Shared memory size (in MiB) required for this step
565
+ port: int, optional
566
+ Port number to specify in the Kubernetes job object
567
+ compute_pool : str, optional, default None
568
+ Compute pool to be used for for this step.
569
+ If not specified, any accessible compute pool within the perimeter is used.
570
+ hostname_resolution_timeout: int, default 10 * 60
571
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
572
+ Only applicable when @parallel is used.
573
+ qos: str, default: Burstable
574
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
575
+
576
+ security_context: Dict[str, Any], optional, default None
577
+ Container security context. Applies to the task container. Allows the following keys:
578
+ - privileged: bool, optional, default None
579
+ - allow_privilege_escalation: bool, optional, default None
580
+ - run_as_user: int, optional, default None
581
+ - run_as_group: int, optional, default None
582
+ - run_as_non_root: bool, optional, default None
423
583
  """
424
584
  ...
425
585
 
426
586
  @typing.overload
427
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
587
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
588
+ """
589
+ Decorator prototype for all step decorators. This function gets specialized
590
+ and imported for all decorators types by _import_plugin_decorators().
591
+ """
428
592
  ...
429
593
 
430
594
  @typing.overload
431
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
595
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
432
596
  ...
433
597
 
434
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
598
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
435
599
  """
436
- Specifies environment variables to be set prior to the execution of a step.
600
+ Decorator prototype for all step decorators. This function gets specialized
601
+ and imported for all decorators types by _import_plugin_decorators().
602
+ """
603
+ ...
604
+
605
+ @typing.overload
606
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
607
+ """
608
+ Specifies secrets to be retrieved and injected as environment variables prior to
609
+ the execution of a step.
437
610
 
438
611
 
439
612
  Parameters
440
613
  ----------
441
- vars : Dict[str, str], default {}
442
- Dictionary of environment variables to set.
614
+ sources : List[Union[str, Dict[str, Any]]], default: []
615
+ List of secret specs, defining how the secrets are to be retrieved
616
+ role : str, optional, default: None
617
+ Role to use for fetching secrets
618
+ """
619
+ ...
620
+
621
+ @typing.overload
622
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623
+ ...
624
+
625
+ @typing.overload
626
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
627
+ ...
628
+
629
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
630
+ """
631
+ Specifies secrets to be retrieved and injected as environment variables prior to
632
+ the execution of a step.
633
+
634
+
635
+ Parameters
636
+ ----------
637
+ sources : List[Union[str, Dict[str, Any]]], default: []
638
+ List of secret specs, defining how the secrets are to be retrieved
639
+ role : str, optional, default: None
640
+ Role to use for fetching secrets
443
641
  """
444
642
  ...
445
643
 
446
644
  @typing.overload
447
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
645
+ def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
448
646
  """
449
647
  Creates a human-readable report, a Metaflow Card, after this step completes.
450
648
 
@@ -472,7 +670,7 @@ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callab
472
670
  def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
473
671
  ...
474
672
 
475
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
673
+ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
476
674
  """
477
675
  Creates a human-readable report, a Metaflow Card, after this step completes.
478
676
 
@@ -493,406 +691,209 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
493
691
  ...
494
692
 
495
693
  @typing.overload
496
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
497
- """
498
- Specifies the number of times the task corresponding
499
- to a step needs to be retried.
500
-
501
- This decorator is useful for handling transient errors, such as networking issues.
502
- If your task contains operations that can't be retried safely, e.g. database updates,
503
- it is advisable to annotate it with `@retry(times=0)`.
504
-
505
- This can be used in conjunction with the `@catch` decorator. The `@catch`
506
- decorator will execute a no-op task after all retries have been exhausted,
507
- ensuring that the flow execution can continue.
508
-
509
-
510
- Parameters
511
- ----------
512
- times : int, default 3
513
- Number of times to retry this task.
514
- minutes_between_retries : int, default 2
515
- Number of minutes between retries.
516
- """
517
- ...
518
-
519
- @typing.overload
520
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
521
- ...
522
-
523
- @typing.overload
524
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
525
- ...
526
-
527
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
694
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
528
695
  """
529
- Specifies the number of times the task corresponding
530
- to a step needs to be retried.
531
-
532
- This decorator is useful for handling transient errors, such as networking issues.
533
- If your task contains operations that can't be retried safely, e.g. database updates,
534
- it is advisable to annotate it with `@retry(times=0)`.
535
-
536
- This can be used in conjunction with the `@catch` decorator. The `@catch`
537
- decorator will execute a no-op task after all retries have been exhausted,
538
- ensuring that the flow execution can continue.
539
-
696
+ Specifies a timeout for your step.
540
697
 
541
- Parameters
542
- ----------
543
- times : int, default 3
544
- Number of times to retry this task.
545
- minutes_between_retries : int, default 2
546
- Number of minutes between retries.
547
- """
548
- ...
549
-
550
- @typing.overload
551
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
552
- """
553
- Specifies the resources needed when executing this step.
698
+ This decorator is useful if this step may hang indefinitely.
554
699
 
555
- Use `@resources` to specify the resource requirements
556
- independently of the specific compute layer (`@batch`, `@kubernetes`).
700
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
701
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
702
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
557
703
 
558
- You can choose the compute layer on the command line by executing e.g.
559
- ```
560
- python myflow.py run --with batch
561
- ```
562
- or
563
- ```
564
- python myflow.py run --with kubernetes
565
- ```
566
- which executes the flow on the desired system using the
567
- requirements specified in `@resources`.
704
+ Note that all the values specified in parameters are added together so if you specify
705
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
568
706
 
569
707
 
570
708
  Parameters
571
709
  ----------
572
- cpu : int, default 1
573
- Number of CPUs required for this step.
574
- gpu : int, optional, default None
575
- Number of GPUs required for this step.
576
- disk : int, optional, default None
577
- Disk size (in MB) required for this step. Only applies on Kubernetes.
578
- memory : int, default 4096
579
- Memory size (in MB) required for this step.
580
- shared_memory : int, optional, default None
581
- The value for the size (in MiB) of the /dev/shm volume for this step.
582
- This parameter maps to the `--shm-size` option in Docker.
710
+ seconds : int, default 0
711
+ Number of seconds to wait prior to timing out.
712
+ minutes : int, default 0
713
+ Number of minutes to wait prior to timing out.
714
+ hours : int, default 0
715
+ Number of hours to wait prior to timing out.
583
716
  """
584
717
  ...
585
718
 
586
719
  @typing.overload
587
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
720
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
588
721
  ...
589
722
 
590
723
  @typing.overload
591
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
724
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
592
725
  ...
593
726
 
594
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
727
+ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
595
728
  """
596
- Specifies the resources needed when executing this step.
729
+ Specifies a timeout for your step.
597
730
 
598
- Use `@resources` to specify the resource requirements
599
- independently of the specific compute layer (`@batch`, `@kubernetes`).
731
+ This decorator is useful if this step may hang indefinitely.
600
732
 
601
- You can choose the compute layer on the command line by executing e.g.
602
- ```
603
- python myflow.py run --with batch
604
- ```
605
- or
606
- ```
607
- python myflow.py run --with kubernetes
608
- ```
609
- which executes the flow on the desired system using the
610
- requirements specified in `@resources`.
733
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
734
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
735
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
736
+
737
+ Note that all the values specified in parameters are added together so if you specify
738
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
611
739
 
612
740
 
613
741
  Parameters
614
742
  ----------
615
- cpu : int, default 1
616
- Number of CPUs required for this step.
617
- gpu : int, optional, default None
618
- Number of GPUs required for this step.
619
- disk : int, optional, default None
620
- Disk size (in MB) required for this step. Only applies on Kubernetes.
621
- memory : int, default 4096
622
- Memory size (in MB) required for this step.
623
- shared_memory : int, optional, default None
624
- The value for the size (in MiB) of the /dev/shm volume for this step.
625
- This parameter maps to the `--shm-size` option in Docker.
743
+ seconds : int, default 0
744
+ Number of seconds to wait prior to timing out.
745
+ minutes : int, default 0
746
+ Number of minutes to wait prior to timing out.
747
+ hours : int, default 0
748
+ Number of hours to wait prior to timing out.
626
749
  """
627
750
  ...
628
751
 
629
752
  @typing.overload
630
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
753
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
631
754
  """
632
- Specifies that the step will success under all circumstances.
755
+ Specifies the Conda environment for the step.
633
756
 
634
- The decorator will create an optional artifact, specified by `var`, which
635
- contains the exception raised. You can use it to detect the presence
636
- of errors, indicating that all happy-path artifacts produced by the step
637
- are missing.
757
+ Information in this decorator will augment any
758
+ attributes set in the `@conda_base` flow-level decorator. Hence,
759
+ you can use `@conda_base` to set packages required by all
760
+ steps and use `@conda` to specify step-specific overrides.
638
761
 
639
762
 
640
763
  Parameters
641
764
  ----------
642
- var : str, optional, default None
643
- Name of the artifact in which to store the caught exception.
644
- If not specified, the exception is not stored.
645
- print_exception : bool, default True
646
- Determines whether or not the exception is printed to
647
- stdout when caught.
765
+ packages : Dict[str, str], default {}
766
+ Packages to use for this step. The key is the name of the package
767
+ and the value is the version to use.
768
+ libraries : Dict[str, str], default {}
769
+ Supported for backward compatibility. When used with packages, packages will take precedence.
770
+ python : str, optional, default None
771
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
772
+ that the version used will correspond to the version of the Python interpreter used to start the run.
773
+ disabled : bool, default False
774
+ If set to True, disables @conda.
648
775
  """
649
776
  ...
650
777
 
651
778
  @typing.overload
652
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
779
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
653
780
  ...
654
781
 
655
782
  @typing.overload
656
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
783
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
657
784
  ...
658
785
 
659
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
786
+ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
660
787
  """
661
- Specifies that the step will success under all circumstances.
788
+ Specifies the Conda environment for the step.
662
789
 
663
- The decorator will create an optional artifact, specified by `var`, which
664
- contains the exception raised. You can use it to detect the presence
665
- of errors, indicating that all happy-path artifacts produced by the step
666
- are missing.
790
+ Information in this decorator will augment any
791
+ attributes set in the `@conda_base` flow-level decorator. Hence,
792
+ you can use `@conda_base` to set packages required by all
793
+ steps and use `@conda` to specify step-specific overrides.
667
794
 
668
795
 
669
796
  Parameters
670
797
  ----------
671
- var : str, optional, default None
672
- Name of the artifact in which to store the caught exception.
673
- If not specified, the exception is not stored.
674
- print_exception : bool, default True
675
- Determines whether or not the exception is printed to
676
- stdout when caught.
798
+ packages : Dict[str, str], default {}
799
+ Packages to use for this step. The key is the name of the package
800
+ and the value is the version to use.
801
+ libraries : Dict[str, str], default {}
802
+ Supported for backward compatibility. When used with packages, packages will take precedence.
803
+ python : str, optional, default None
804
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
805
+ that the version used will correspond to the version of the Python interpreter used to start the run.
806
+ disabled : bool, default False
807
+ If set to True, disables @conda.
677
808
  """
678
809
  ...
679
810
 
680
811
  @typing.overload
681
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
682
813
  """
683
- Specifies a timeout for your step.
684
-
685
- This decorator is useful if this step may hang indefinitely.
686
-
687
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
688
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
689
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
690
-
691
- Note that all the values specified in parameters are added together so if you specify
692
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
814
+ Specifies environment variables to be set prior to the execution of a step.
693
815
 
694
816
 
695
817
  Parameters
696
818
  ----------
697
- seconds : int, default 0
698
- Number of seconds to wait prior to timing out.
699
- minutes : int, default 0
700
- Number of minutes to wait prior to timing out.
701
- hours : int, default 0
702
- Number of hours to wait prior to timing out.
819
+ vars : Dict[str, str], default {}
820
+ Dictionary of environment variables to set.
703
821
  """
704
822
  ...
705
823
 
706
824
  @typing.overload
707
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
825
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
708
826
  ...
709
827
 
710
828
  @typing.overload
711
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
829
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
712
830
  ...
713
831
 
714
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
832
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
715
833
  """
716
- Specifies a timeout for your step.
717
-
718
- This decorator is useful if this step may hang indefinitely.
719
-
720
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
721
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
722
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
723
-
724
- Note that all the values specified in parameters are added together so if you specify
725
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
834
+ Specifies environment variables to be set prior to the execution of a step.
726
835
 
727
836
 
728
837
  Parameters
729
838
  ----------
730
- seconds : int, default 0
731
- Number of seconds to wait prior to timing out.
732
- minutes : int, default 0
733
- Number of minutes to wait prior to timing out.
734
- hours : int, default 0
735
- Number of hours to wait prior to timing out.
839
+ vars : Dict[str, str], default {}
840
+ Dictionary of environment variables to set.
736
841
  """
737
842
  ...
738
843
 
739
844
  @typing.overload
740
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
845
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
741
846
  """
742
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
847
+ Specifies that the step will success under all circumstances.
848
+
849
+ The decorator will create an optional artifact, specified by `var`, which
850
+ contains the exception raised. You can use it to detect the presence
851
+ of errors, indicating that all happy-path artifacts produced by the step
852
+ are missing.
743
853
 
744
854
 
745
855
  Parameters
746
856
  ----------
747
- cpu : int, default 1
748
- Number of CPUs required for this step. If `@resources` is
749
- also present, the maximum value from all decorators is used.
750
- gpu : int, default 0
751
- Number of GPUs required for this step. If `@resources` is
752
- also present, the maximum value from all decorators is used.
753
- memory : int, default 4096
754
- Memory size (in MB) required for this step. If
755
- `@resources` is also present, the maximum value from all decorators is
756
- used.
757
- image : str, optional, default None
758
- Docker image to use when launching on AWS Batch. If not specified, and
759
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
760
- not, a default Docker image mapping to the current version of Python is used.
761
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
762
- AWS Batch Job Queue to submit the job to.
763
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
764
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
765
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
766
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
767
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
768
- shared_memory : int, optional, default None
769
- The value for the size (in MiB) of the /dev/shm volume for this step.
770
- This parameter maps to the `--shm-size` option in Docker.
771
- max_swap : int, optional, default None
772
- The total amount of swap memory (in MiB) a container can use for this
773
- step. This parameter is translated to the `--memory-swap` option in
774
- Docker where the value is the sum of the container memory plus the
775
- `max_swap` value.
776
- swappiness : int, optional, default None
777
- This allows you to tune memory swappiness behavior for this step.
778
- A swappiness value of 0 causes swapping not to happen unless absolutely
779
- necessary. A swappiness value of 100 causes pages to be swapped very
780
- aggressively. Accepted values are whole numbers between 0 and 100.
781
- aws_batch_tags: Dict[str, str], optional, default None
782
- Sets arbitrary AWS tags on the AWS Batch compute environment.
783
- Set as string key-value pairs.
784
- use_tmpfs : bool, default False
785
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
786
- not available on Fargate compute environments
787
- tmpfs_tempdir : bool, default True
788
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
789
- tmpfs_size : int, optional, default None
790
- The value for the size (in MiB) of the tmpfs mount for this step.
791
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
792
- memory allocated for this step.
793
- tmpfs_path : str, optional, default None
794
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
795
- inferentia : int, default 0
796
- Number of Inferentia chips required for this step.
797
- trainium : int, default None
798
- Alias for inferentia. Use only one of the two.
799
- efa : int, default 0
800
- Number of elastic fabric adapter network devices to attach to container
801
- ephemeral_storage : int, default None
802
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
803
- This is only relevant for Fargate compute environments
804
- log_driver: str, optional, default None
805
- The log driver to use for the Amazon ECS container.
806
- log_options: List[str], optional, default None
807
- List of strings containing options for the chosen log driver. The configurable values
808
- depend on the `log driver` chosen. Validation of these options is not supported yet.
809
- Example: [`awslogs-group:aws/batch/job`]
857
+ var : str, optional, default None
858
+ Name of the artifact in which to store the caught exception.
859
+ If not specified, the exception is not stored.
860
+ print_exception : bool, default True
861
+ Determines whether or not the exception is printed to
862
+ stdout when caught.
810
863
  """
811
864
  ...
812
865
 
813
866
  @typing.overload
814
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
867
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
815
868
  ...
816
869
 
817
870
  @typing.overload
818
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
871
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
872
  ...
820
873
 
821
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
874
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
822
875
  """
823
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
876
+ Specifies that the step will success under all circumstances.
877
+
878
+ The decorator will create an optional artifact, specified by `var`, which
879
+ contains the exception raised. You can use it to detect the presence
880
+ of errors, indicating that all happy-path artifacts produced by the step
881
+ are missing.
824
882
 
825
883
 
826
884
  Parameters
827
885
  ----------
828
- cpu : int, default 1
829
- Number of CPUs required for this step. If `@resources` is
830
- also present, the maximum value from all decorators is used.
831
- gpu : int, default 0
832
- Number of GPUs required for this step. If `@resources` is
833
- also present, the maximum value from all decorators is used.
834
- memory : int, default 4096
835
- Memory size (in MB) required for this step. If
836
- `@resources` is also present, the maximum value from all decorators is
837
- used.
838
- image : str, optional, default None
839
- Docker image to use when launching on AWS Batch. If not specified, and
840
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
841
- not, a default Docker image mapping to the current version of Python is used.
842
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
843
- AWS Batch Job Queue to submit the job to.
844
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
845
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
846
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
847
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
848
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
849
- shared_memory : int, optional, default None
850
- The value for the size (in MiB) of the /dev/shm volume for this step.
851
- This parameter maps to the `--shm-size` option in Docker.
852
- max_swap : int, optional, default None
853
- The total amount of swap memory (in MiB) a container can use for this
854
- step. This parameter is translated to the `--memory-swap` option in
855
- Docker where the value is the sum of the container memory plus the
856
- `max_swap` value.
857
- swappiness : int, optional, default None
858
- This allows you to tune memory swappiness behavior for this step.
859
- A swappiness value of 0 causes swapping not to happen unless absolutely
860
- necessary. A swappiness value of 100 causes pages to be swapped very
861
- aggressively. Accepted values are whole numbers between 0 and 100.
862
- aws_batch_tags: Dict[str, str], optional, default None
863
- Sets arbitrary AWS tags on the AWS Batch compute environment.
864
- Set as string key-value pairs.
865
- use_tmpfs : bool, default False
866
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
867
- not available on Fargate compute environments
868
- tmpfs_tempdir : bool, default True
869
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
870
- tmpfs_size : int, optional, default None
871
- The value for the size (in MiB) of the tmpfs mount for this step.
872
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
873
- memory allocated for this step.
874
- tmpfs_path : str, optional, default None
875
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
876
- inferentia : int, default 0
877
- Number of Inferentia chips required for this step.
878
- trainium : int, default None
879
- Alias for inferentia. Use only one of the two.
880
- efa : int, default 0
881
- Number of elastic fabric adapter network devices to attach to container
882
- ephemeral_storage : int, default None
883
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
884
- This is only relevant for Fargate compute environments
885
- log_driver: str, optional, default None
886
- The log driver to use for the Amazon ECS container.
887
- log_options: List[str], optional, default None
888
- List of strings containing options for the chosen log driver. The configurable values
889
- depend on the `log driver` chosen. Validation of these options is not supported yet.
890
- Example: [`awslogs-group:aws/batch/job`]
886
+ var : str, optional, default None
887
+ Name of the artifact in which to store the caught exception.
888
+ If not specified, the exception is not stored.
889
+ print_exception : bool, default True
890
+ Determines whether or not the exception is printed to
891
+ stdout when caught.
891
892
  """
892
893
  ...
893
894
 
894
895
  @typing.overload
895
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
896
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
896
897
  """
897
898
  Specifies the event(s) that this flow depends on.
898
899
 
@@ -940,7 +941,7 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
940
941
  def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
941
942
  ...
942
943
 
943
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
944
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
944
945
  """
945
946
  Specifies the event(s) that this flow depends on.
946
947
 
@@ -963,29 +964,148 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
963
964
  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
964
965
  ```
965
966
 
966
- 'parameters' can also be a list of strings and tuples like so:
967
- ```
968
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
969
- ```
970
- This is equivalent to:
971
- ```
972
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
973
- ```
967
+ 'parameters' can also be a list of strings and tuples like so:
968
+ ```
969
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
970
+ ```
971
+ This is equivalent to:
972
+ ```
973
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
974
+ ```
975
+
976
+
977
+ Parameters
978
+ ----------
979
+ event : Union[str, Dict[str, Any]], optional, default None
980
+ Event dependency for this flow.
981
+ events : List[Union[str, Dict[str, Any]]], default []
982
+ Events dependency for this flow.
983
+ options : Dict[str, Any], default {}
984
+ Backend-specific configuration for tuning eventing behavior.
985
+ """
986
+ ...
987
+
988
+ @typing.overload
989
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
990
+ """
991
+ Specifies the PyPI packages for all steps of the flow.
992
+
993
+ Use `@pypi_base` to set common packages required by all
994
+ steps and use `@pypi` to specify step-specific overrides.
995
+
996
+ Parameters
997
+ ----------
998
+ packages : Dict[str, str], default: {}
999
+ Packages to use for this flow. The key is the name of the package
1000
+ and the value is the version to use.
1001
+ python : str, optional, default: None
1002
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1003
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1004
+ """
1005
+ ...
1006
+
1007
+ @typing.overload
1008
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1009
+ ...
1010
+
1011
+ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
1012
+ """
1013
+ Specifies the PyPI packages for all steps of the flow.
1014
+
1015
+ Use `@pypi_base` to set common packages required by all
1016
+ steps and use `@pypi` to specify step-specific overrides.
1017
+
1018
+ Parameters
1019
+ ----------
1020
+ packages : Dict[str, str], default: {}
1021
+ Packages to use for this flow. The key is the name of the package
1022
+ and the value is the version to use.
1023
+ python : str, optional, default: None
1024
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1025
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1026
+ """
1027
+ ...
1028
+
1029
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1030
+ """
1031
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1032
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1033
+
1034
+
1035
+ Parameters
1036
+ ----------
1037
+ timeout : int
1038
+ Time, in seconds before the task times out and fails. (Default: 3600)
1039
+ poke_interval : int
1040
+ Time in seconds that the job should wait in between each try. (Default: 60)
1041
+ mode : str
1042
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1043
+ exponential_backoff : bool
1044
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1045
+ pool : str
1046
+ the slot pool this task should run in,
1047
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1048
+ soft_fail : bool
1049
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1050
+ name : str
1051
+ Name of the sensor on Airflow
1052
+ description : str
1053
+ Description of sensor in the Airflow UI
1054
+ external_dag_id : str
1055
+ The dag_id that contains the task you want to wait for.
1056
+ external_task_ids : List[str]
1057
+ The list of task_ids that you want to wait for.
1058
+ If None (default value) the sensor waits for the DAG. (Default: None)
1059
+ allowed_states : List[str]
1060
+ Iterable of allowed states, (Default: ['success'])
1061
+ failed_states : List[str]
1062
+ Iterable of failed or dis-allowed states. (Default: None)
1063
+ execution_delta : datetime.timedelta
1064
+ time difference with the previous execution to look at,
1065
+ the default is the same logical date as the current task or DAG. (Default: None)
1066
+ check_existence: bool
1067
+ Set to True to check if the external task exists or check if
1068
+ the DAG to wait for exists. (Default: True)
1069
+ """
1070
+ ...
1071
+
1072
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1073
+ """
1074
+ Specifies what flows belong to the same project.
1075
+
1076
+ A project-specific namespace is created for all flows that
1077
+ use the same `@project(name)`.
974
1078
 
975
1079
 
976
1080
  Parameters
977
1081
  ----------
978
- event : Union[str, Dict[str, Any]], optional, default None
979
- Event dependency for this flow.
980
- events : List[Union[str, Dict[str, Any]]], default []
981
- Events dependency for this flow.
982
- options : Dict[str, Any], default {}
983
- Backend-specific configuration for tuning eventing behavior.
1082
+ name : str
1083
+ Project name. Make sure that the name is unique amongst all
1084
+ projects that use the same production scheduler. The name may
1085
+ contain only lowercase alphanumeric characters and underscores.
1086
+
1087
+ branch : Optional[str], default None
1088
+ The branch to use. If not specified, the branch is set to
1089
+ `user.<username>` unless `production` is set to `True`. This can
1090
+ also be set on the command line using `--branch` as a top-level option.
1091
+ It is an error to specify `branch` in the decorator and on the command line.
1092
+
1093
+ production : bool, default False
1094
+ Whether or not the branch is the production branch. This can also be set on the
1095
+ command line using `--production` as a top-level option. It is an error to specify
1096
+ `production` in the decorator and on the command line.
1097
+ The project branch name will be:
1098
+ - if `branch` is specified:
1099
+ - if `production` is True: `prod.<branch>`
1100
+ - if `production` is False: `test.<branch>`
1101
+ - if `branch` is not specified:
1102
+ - if `production` is True: `prod`
1103
+ - if `production` is False: `user.<username>`
984
1104
  """
985
1105
  ...
986
1106
 
987
1107
  @typing.overload
988
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1108
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
989
1109
  """
990
1110
  Specifies the flow(s) that this flow depends on.
991
1111
 
@@ -1037,7 +1157,7 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
1037
1157
  def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1038
1158
  ...
1039
1159
 
1040
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1160
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
1041
1161
  """
1042
1162
  Specifies the flow(s) that this flow depends on.
1043
1163
 
@@ -1086,34 +1206,58 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1086
1206
  ...
1087
1207
 
1088
1208
  @typing.overload
1089
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1209
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1090
1210
  """
1091
- Specifies the Conda environment for all steps of the flow.
1092
-
1093
- Use `@conda_base` to set common libraries required by all
1094
- steps and use `@conda` to specify step-specific additions.
1211
+ Specifies the times when the flow should be run when running on a
1212
+ production scheduler.
1095
1213
 
1096
1214
 
1097
1215
  Parameters
1098
1216
  ----------
1099
- packages : Dict[str, str], default {}
1100
- Packages to use for this flow. The key is the name of the package
1101
- and the value is the version to use.
1102
- libraries : Dict[str, str], default {}
1103
- Supported for backward compatibility. When used with packages, packages will take precedence.
1104
- python : str, optional, default None
1105
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1106
- that the version used will correspond to the version of the Python interpreter used to start the run.
1107
- disabled : bool, default False
1108
- If set to True, disables Conda.
1217
+ hourly : bool, default False
1218
+ Run the workflow hourly.
1219
+ daily : bool, default True
1220
+ Run the workflow daily.
1221
+ weekly : bool, default False
1222
+ Run the workflow weekly.
1223
+ cron : str, optional, default None
1224
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1225
+ specified by this expression.
1226
+ timezone : str, optional, default None
1227
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1228
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1109
1229
  """
1110
1230
  ...
1111
1231
 
1112
1232
  @typing.overload
1113
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1233
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1234
+ ...
1235
+
1236
+ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: str | None = None, timezone: str | None = None):
1237
+ """
1238
+ Specifies the times when the flow should be run when running on a
1239
+ production scheduler.
1240
+
1241
+
1242
+ Parameters
1243
+ ----------
1244
+ hourly : bool, default False
1245
+ Run the workflow hourly.
1246
+ daily : bool, default True
1247
+ Run the workflow daily.
1248
+ weekly : bool, default False
1249
+ Run the workflow weekly.
1250
+ cron : str, optional, default None
1251
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1252
+ specified by this expression.
1253
+ timezone : str, optional, default None
1254
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1255
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1256
+ """
1114
1257
  ...
1115
1258
 
1116
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1259
+ @typing.overload
1260
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1117
1261
  """
1118
1262
  Specifies the Conda environment for all steps of the flow.
1119
1263
 
@@ -1136,91 +1280,34 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1136
1280
  """
1137
1281
  ...
1138
1282
 
1139
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1140
- """
1141
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1142
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1143
-
1144
-
1145
- Parameters
1146
- ----------
1147
- timeout : int
1148
- Time, in seconds before the task times out and fails. (Default: 3600)
1149
- poke_interval : int
1150
- Time in seconds that the job should wait in between each try. (Default: 60)
1151
- mode : str
1152
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1153
- exponential_backoff : bool
1154
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1155
- pool : str
1156
- the slot pool this task should run in,
1157
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1158
- soft_fail : bool
1159
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1160
- name : str
1161
- Name of the sensor on Airflow
1162
- description : str
1163
- Description of sensor in the Airflow UI
1164
- external_dag_id : str
1165
- The dag_id that contains the task you want to wait for.
1166
- external_task_ids : List[str]
1167
- The list of task_ids that you want to wait for.
1168
- If None (default value) the sensor waits for the DAG. (Default: None)
1169
- allowed_states : List[str]
1170
- Iterable of allowed states, (Default: ['success'])
1171
- failed_states : List[str]
1172
- Iterable of failed or dis-allowed states. (Default: None)
1173
- execution_delta : datetime.timedelta
1174
- time difference with the previous execution to look at,
1175
- the default is the same logical date as the current task or DAG. (Default: None)
1176
- check_existence: bool
1177
- Set to True to check if the external task exists or check if
1178
- the DAG to wait for exists. (Default: True)
1179
- """
1180
- ...
1181
-
1182
- @typing.overload
1183
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1184
- """
1185
- Specifies the PyPI packages for all steps of the flow.
1186
-
1187
- Use `@pypi_base` to set common packages required by all
1188
- steps and use `@pypi` to specify step-specific overrides.
1189
-
1190
- Parameters
1191
- ----------
1192
- packages : Dict[str, str], default: {}
1193
- Packages to use for this flow. The key is the name of the package
1194
- and the value is the version to use.
1195
- python : str, optional, default: None
1196
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1197
- that the version used will correspond to the version of the Python interpreter used to start the run.
1198
- """
1199
- ...
1200
-
1201
1283
  @typing.overload
1202
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1284
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1203
1285
  ...
1204
1286
 
1205
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1287
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1206
1288
  """
1207
- Specifies the PyPI packages for all steps of the flow.
1289
+ Specifies the Conda environment for all steps of the flow.
1290
+
1291
+ Use `@conda_base` to set common libraries required by all
1292
+ steps and use `@conda` to specify step-specific additions.
1208
1293
 
1209
- Use `@pypi_base` to set common packages required by all
1210
- steps and use `@pypi` to specify step-specific overrides.
1211
1294
 
1212
1295
  Parameters
1213
1296
  ----------
1214
- packages : Dict[str, str], default: {}
1297
+ packages : Dict[str, str], default {}
1215
1298
  Packages to use for this flow. The key is the name of the package
1216
1299
  and the value is the version to use.
1217
- python : str, optional, default: None
1300
+ libraries : Dict[str, str], default {}
1301
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1302
+ python : str, optional, default None
1218
1303
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1219
1304
  that the version used will correspond to the version of the Python interpreter used to start the run.
1305
+ disabled : bool, default False
1306
+ If set to True, disables Conda.
1220
1307
  """
1221
1308
  ...
1222
1309
 
1223
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1310
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1224
1311
  """
1225
1312
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1226
1313
  before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
@@ -1263,89 +1350,3 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1263
1350
  """
1264
1351
  ...
1265
1352
 
1266
- @typing.overload
1267
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1268
- """
1269
- Specifies the times when the flow should be run when running on a
1270
- production scheduler.
1271
-
1272
-
1273
- Parameters
1274
- ----------
1275
- hourly : bool, default False
1276
- Run the workflow hourly.
1277
- daily : bool, default True
1278
- Run the workflow daily.
1279
- weekly : bool, default False
1280
- Run the workflow weekly.
1281
- cron : str, optional, default None
1282
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1283
- specified by this expression.
1284
- timezone : str, optional, default None
1285
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1286
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1287
- """
1288
- ...
1289
-
1290
- @typing.overload
1291
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1292
- ...
1293
-
1294
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1295
- """
1296
- Specifies the times when the flow should be run when running on a
1297
- production scheduler.
1298
-
1299
-
1300
- Parameters
1301
- ----------
1302
- hourly : bool, default False
1303
- Run the workflow hourly.
1304
- daily : bool, default True
1305
- Run the workflow daily.
1306
- weekly : bool, default False
1307
- Run the workflow weekly.
1308
- cron : str, optional, default None
1309
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1310
- specified by this expression.
1311
- timezone : str, optional, default None
1312
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1313
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1314
- """
1315
- ...
1316
-
1317
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1318
- """
1319
- Specifies what flows belong to the same project.
1320
-
1321
- A project-specific namespace is created for all flows that
1322
- use the same `@project(name)`.
1323
-
1324
-
1325
- Parameters
1326
- ----------
1327
- name : str
1328
- Project name. Make sure that the name is unique amongst all
1329
- projects that use the same production scheduler. The name may
1330
- contain only lowercase alphanumeric characters and underscores.
1331
-
1332
- branch : Optional[str], default None
1333
- The branch to use. If not specified, the branch is set to
1334
- `user.<username>` unless `production` is set to `True`. This can
1335
- also be set on the command line using `--branch` as a top-level option.
1336
- It is an error to specify `branch` in the decorator and on the command line.
1337
-
1338
- production : bool, default False
1339
- Whether or not the branch is the production branch. This can also be set on the
1340
- command line using `--production` as a top-level option. It is an error to specify
1341
- `production` in the decorator and on the command line.
1342
- The project branch name will be:
1343
- - if `branch` is specified:
1344
- - if `production` is True: `prod.<branch>`
1345
- - if `production` is False: `test.<branch>`
1346
- - if `branch` is not specified:
1347
- - if `production` is True: `prod`
1348
- - if `production` is False: `user.<username>`
1349
- """
1350
- ...
1351
-