ob-metaflow-stubs 6.0.3.180rc4__py2.py3-none-any.whl → 6.0.3.180rc5__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +887 -887
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +65 -65
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +8 -8
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +5 -5
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  216. {ob_metaflow_stubs-6.0.3.180rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.180rc5.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.180rc4.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.180rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.180rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc5.dist-info}/top_level.txt +0 -0
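
The diff below covers `metaflow-stubs/__init__.pyi`. Apart from the regenerated timestamp, it largely reorders the auto-generated decorator stubs (`@secrets`, `@retry`, `@timeout`, `@card`, `@kubernetes`, `@nvct`, and others) rather than changing their signatures. As a hedged illustration only (not part of the package), a minimal flow using a few of the stubbed step decorators might look like the sketch below; the flow name and the "db-credentials" secret source are hypothetical.

```python
# Hypothetical usage sketch based on the decorator signatures stubbed below.
# The flow name and the "db-credentials" secret source are illustrative only.
from metaflow import FlowSpec, card, retry, secrets, step, timeout


class ExampleFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # injected as environment variables before the step runs
    @retry(times=3, minutes_between_retries=2)  # retry transient failures
    @timeout(minutes=30)  # a hang counts as an exception and triggers @retry
    @card(type="default")  # attach a human-readable report to the task
    @step
    def start(self):
        self.value = 1
        self.next(self.end)

    @step
    def end(self):
        print("done:", self.value)


if __name__ == "__main__":
    ExampleFlow()
```

Such a flow would typically be run with `python example_flow.py run`, optionally adding `--with kubernetes` as described in the `@kubernetes` docstring further down in the diff.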
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-17T18:51:01.431227 #
+ # Generated on 2025-06-17T20:32:02.265213 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,10 +35,10 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import cards as cards
- from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
+ from . import cards as cards
  from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
@@ -155,97 +155,38 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
- Parameters
- ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
- """
- ...
-
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

@@ -265,75 +206,6 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -385,41 +257,6 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
  @typing.overload
  def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -477,107 +314,206 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
-
- Parameters
- ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
  memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
+ """
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

@@ -641,170 +577,39 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Enables checkpointing for a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

-
-
- Parameters
- ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
-
-
- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- debug: bool
- Whether to turn on verbose debugging logs.
- kwargs : Any
- Any other keyword arguments are passed directly to the vLLM engine.
- This allows for flexible configuration of vLLM server settings.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
@@ -820,92 +625,17 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
820
625
  """
821
626
  ...
822
627
 
823
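As a usage sketch of the `@card` decorator documented above: the values are illustrative, and it assumes the upstream Metaflow card components API (`metaflow.cards.Markdown` and `current.card.append`) is available alongside these stubs.

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown  # assumed available, as in upstream Metaflow


class CardDemoFlow(FlowSpec):

    @card(type="default", timeout=60)  # timeout override is illustrative
    @step
    def start(self):
        self.score = 0.93
        # Append a component to the card generated for this task.
        current.card.append(Markdown("## Model score: %s" % self.score))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```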
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
628
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
629
  """
825
- Specifies that this step should execute on Kubernetes.
630
+ Specifies that this step should execute on DGX Cloud.
826
631
 
827
632
 
828
633
  Parameters
829
634
  ----------
830
- cpu : int, default 1
831
- Number of CPUs required for this step. If `@resources` is
832
- also present, the maximum value from all decorators is used.
833
- memory : int, default 4096
834
- Memory size (in MB) required for this step. If
835
- `@resources` is also present, the maximum value from all decorators is
836
- used.
837
- disk : int, default 10240
838
- Disk size (in MB) required for this step. If
839
- `@resources` is also present, the maximum value from all decorators is
840
- used.
841
- image : str, optional, default None
842
- Docker image to use when launching on Kubernetes. If not specified, and
843
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
844
- not, a default Docker image mapping to the current version of Python is used.
845
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
846
- If given, the imagePullPolicy to be applied to the Docker image of the step.
847
- image_pull_secrets: List[str], default []
848
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
849
- Kubernetes image pull secrets to use when pulling container images
850
- in Kubernetes.
851
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
852
- Kubernetes service account to use when launching pod in Kubernetes.
853
- secrets : List[str], optional, default None
854
- Kubernetes secrets to use when launching pod in Kubernetes. These
855
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
856
- in Metaflow configuration.
857
- node_selector: Union[Dict[str,str], str], optional, default None
858
- Kubernetes node selector(s) to apply to the pod running the task.
859
- Can be passed in as a comma separated string of values e.g.
860
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
861
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
862
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
863
- Kubernetes namespace to use when launching pod in Kubernetes.
864
- gpu : int, optional, default None
865
- Number of GPUs required for this step. A value of zero implies that
866
- the scheduled node should not have GPUs.
867
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
868
- The vendor of the GPUs to be used for this step.
869
- tolerations : List[str], default []
870
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
871
- Kubernetes tolerations to use when launching pod in Kubernetes.
872
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
873
- Kubernetes labels to use when launching pod in Kubernetes.
874
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
875
- Kubernetes annotations to use when launching pod in Kubernetes.
876
- use_tmpfs : bool, default False
877
- This enables an explicit tmpfs mount for this step.
878
- tmpfs_tempdir : bool, default True
879
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
880
- tmpfs_size : int, optional, default: None
881
- The value for the size (in MiB) of the tmpfs mount for this step.
882
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
883
- memory allocated for this step.
884
- tmpfs_path : str, optional, default /metaflow_temp
885
- Path to tmpfs mount for this step.
886
- persistent_volume_claims : Dict[str, str], optional, default None
887
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
888
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
889
- shared_memory: int, optional
890
- Shared memory size (in MiB) required for this step
891
- port: int, optional
892
- Port number to specify in the Kubernetes job object
893
- compute_pool : str, optional, default None
894
- Compute pool to be used for for this step.
895
- If not specified, any accessible compute pool within the perimeter is used.
896
- hostname_resolution_timeout: int, default 10 * 60
897
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
898
- Only applicable when @parallel is used.
899
- qos: str, default: Burstable
900
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
901
-
902
- security_context: Dict[str, Any], optional, default None
903
- Container security context. Applies to the task container. Allows the following keys:
904
- - privileged: bool, optional, default None
905
- - allow_privilege_escalation: bool, optional, default None
906
- - run_as_user: int, optional, default None
907
- - run_as_group: int, optional, default None
908
- - run_as_non_root: bool, optional, default None
635
+ gpu : int
636
+ Number of GPUs to use.
637
+ gpu_type : str
638
+ Type of Nvidia GPU to use.
909
639
  """
910
640
  ...
911
641
 
@@ -943,127 +673,659 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
943
673
  ...
944
674
 
945
675
  @typing.overload
946
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
676
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
947
677
  """
948
- Specifies a timeout for your step.
949
-
950
- This decorator is useful if this step may hang indefinitely.
678
+ Specifies the resources needed when executing this step.
951
679
 
952
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
953
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
954
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
680
+ Use `@resources` to specify the resource requirements
681
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
955
682
 
956
- Note that all the values specified in parameters are added together so if you specify
957
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
683
+ You can choose the compute layer on the command line by executing e.g.
684
+ ```
685
+ python myflow.py run --with batch
686
+ ```
687
+ or
688
+ ```
689
+ python myflow.py run --with kubernetes
690
+ ```
691
+ which executes the flow on the desired system using the
692
+ requirements specified in `@resources`.
958
693
 
959
694
 
960
695
  Parameters
961
696
  ----------
962
- seconds : int, default 0
963
- Number of seconds to wait prior to timing out.
964
- minutes : int, default 0
965
- Number of minutes to wait prior to timing out.
966
- hours : int, default 0
967
- Number of hours to wait prior to timing out.
697
+ cpu : int, default 1
698
+ Number of CPUs required for this step.
699
+ gpu : int, optional, default None
700
+ Number of GPUs required for this step.
701
+ disk : int, optional, default None
702
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
703
+ memory : int, default 4096
704
+ Memory size (in MB) required for this step.
705
+ shared_memory : int, optional, default None
706
+ The value for the size (in MiB) of the /dev/shm volume for this step.
707
+ This parameter maps to the `--shm-size` option in Docker.
968
708
  """
969
709
  ...
970
710
 
971
711
  @typing.overload
972
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
712
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
973
713
  ...
974
714
 
975
715
  @typing.overload
976
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
716
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
977
717
  ...
978
718
 
979
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
719
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
980
720
  """
981
- Specifies a timeout for your step.
721
+ Specifies the resources needed when executing this step.
982
722
 
983
- This decorator is useful if this step may hang indefinitely.
723
+ Use `@resources` to specify the resource requirements
724
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
984
725
 
985
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
986
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
987
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
726
+ You can choose the compute layer on the command line by executing e.g.
727
+ ```
728
+ python myflow.py run --with batch
729
+ ```
730
+ or
731
+ ```
732
+ python myflow.py run --with kubernetes
733
+ ```
734
+ which executes the flow on the desired system using the
735
+ requirements specified in `@resources`.
736
+
737
+
738
+ Parameters
739
+ ----------
740
+ cpu : int, default 1
741
+ Number of CPUs required for this step.
742
+ gpu : int, optional, default None
743
+ Number of GPUs required for this step.
744
+ disk : int, optional, default None
745
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
746
+ memory : int, default 4096
747
+ Memory size (in MB) required for this step.
748
+ shared_memory : int, optional, default None
749
+ The value for the size (in MiB) of the /dev/shm volume for this step.
750
+ This parameter maps to the `--shm-size` option in Docker.
751
+ """
752
+ ...
753
+
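A minimal sketch of the `@resources` pattern described above, with illustrative values:

```python
from metaflow import FlowSpec, resources, step


class ResourceDemoFlow(FlowSpec):

    # Request 2 CPUs and 8 GB of memory; the same spec applies whether the
    # run stays local or is launched `--with batch` / `--with kubernetes`.
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        self.numbers = list(range(10))
        self.next(self.end)

    @step
    def end(self):
        print(sum(self.numbers))


if __name__ == "__main__":
    ResourceDemoFlow()
```

Running `python resource_demo.py run --with kubernetes` (or `--with batch`) applies the same requirements on the chosen compute layer, as the docstring notes.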
754
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
755
+ """
756
+ Specifies that this step is used to deploy an instance of the app.
757
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
758
+
759
+
760
+ Parameters
761
+ ----------
762
+ app_port : int
763
+ Port number the deployed app listens on.
764
+ app_name : str
765
+ Name of the app to deploy.
766
+ """
767
+ ...
768
+
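A hedged sketch of `@app_deploy` based only on the signature and requirements listed above; the attribute values, the timing of when they must be set, and the top-level import are assumptions, not a confirmed recipe.

```python
from metaflow import FlowSpec, step
from metaflow import app_deploy  # assumed to be exported at the top level, per this stub


class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        # Attributes the decorator documents as required (values are hypothetical).
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "python serve.py"
        self.deployDir = "./app"
        self.next(self.deploy)

    @app_deploy(app_port=8080, app_name="demo-app")  # illustrative values
    @step
    def deploy(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    AppDeployFlow()
```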
769
+ @typing.overload
770
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
771
+ """
772
+ Decorator prototype for all step decorators. This function gets specialized
773
+ and imported for all decorators types by _import_plugin_decorators().
774
+ """
775
+ ...
776
+
777
+ @typing.overload
778
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
779
+ ...
780
+
781
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
782
+ """
783
+ Decorator prototype for all step decorators. This function gets specialized
784
+ and imported for all decorators types by _import_plugin_decorators().
785
+ """
786
+ ...
787
+
788
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
789
+ """
790
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
791
+
792
+ User code call
793
+ --------------
794
+ @ollama(
795
+ models=[...],
796
+ ...
797
+ )
798
+
799
+ Valid backend options
800
+ ---------------------
801
+ - 'local': Run as a separate process on the local task machine.
802
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
803
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
804
+
805
+ Valid model options
806
+ -------------------
807
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
808
+
809
+
810
+ Parameters
811
+ ----------
812
+ models: list[str]
813
+ List of Ollama containers running models in sidecars.
814
+ backend: str
815
+ Determines where and how to run the Ollama process.
816
+ force_pull: bool
817
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
818
+ cache_update_policy: str
819
+ Cache update policy: "auto", "force", or "never".
820
+ force_cache_update: bool
821
+ Simple override for "force" cache update policy.
822
+ debug: bool
823
+ Whether to turn on verbose debugging logs.
824
+ circuit_breaker_config: dict
825
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
826
+ timeout_config: dict
827
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
828
+ """
829
+ ...
830
+
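A sketch of attaching an Ollama sidecar with the documented `models` and `backend` options; it assumes the remaining parameters (which the stub lists without defaults) fall back to sensible runtime defaults.

```python
from metaflow import FlowSpec, step
from metaflow import ollama  # exported by the Outerbounds extension, per this stub


class OllamaSidecarFlow(FlowSpec):

    # Run a llama3.2 sidecar on the local task machine; force_pull,
    # cache_update_policy, etc. are assumed to default at runtime.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The sidecar serves the Ollama API for the duration of this step.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaSidecarFlow()
```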
831
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
832
+ """
833
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
834
+
835
+
836
+ Parameters
837
+ ----------
838
+ temp_dir_root : str, optional
839
+ The root directory that will hold the temporary directory where objects will be downloaded.
840
+
841
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
842
+ The list of repos (models/datasets) to load.
843
+
844
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
845
+
846
+ - If repo (model/dataset) is not found in the datastore:
847
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
848
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
849
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
850
+
851
+ - If repo is found in the datastore:
852
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
853
+ """
854
+ ...
855
+
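A sketch of loading a repo with `@huggingface_hub`; treating `current.huggingface_hub.loaded` as a mapping from repo id to local path is an assumption made for illustration.

```python
from metaflow import FlowSpec, current, step
from metaflow import huggingface_hub  # exported per this stub


class HFLoadFlow(FlowSpec):

    # Cache the repo in the Metaflow datastore and make it available locally.
    @huggingface_hub(load=["meta-llama/Llama-3.2-1B"])
    @step
    def start(self):
        # `loaded` is documented above; indexing by repo id is assumed here.
        local_path = current.huggingface_hub.loaded["meta-llama/Llama-3.2-1B"]
        print("model files at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFLoadFlow()
```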
856
+ @typing.overload
857
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
858
+ """
859
+ Enables checkpointing for a step.
860
+
861
+
862
+
863
+ Parameters
864
+ ----------
865
+ load_policy : str, default: "fresh"
866
+ The policy for loading the checkpoint. The following policies are supported:
867
+ - "eager": Loads the the latest available checkpoint within the namespace.
868
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
869
+ will be loaded at the start of the task.
870
+ - "none": Do not load any checkpoint
871
+ - "fresh": Loads the lastest checkpoint created within the running Task.
872
+ This mode helps loading checkpoints across various retry attempts of the same task.
873
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
874
+ created within the task will be loaded when the task is retries execution on failure.
875
+
876
+ temp_dir_root : str, default: None
877
+ The root directory under which `current.checkpoint.directory` will be created.
878
+ """
879
+ ...
880
+
881
+ @typing.overload
882
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
883
+ ...
884
+
885
+ @typing.overload
886
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
887
+ ...
888
+
889
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
890
+ """
891
+ Enables checkpointing for a step.
892
+
893
+
894
+
895
+ Parameters
896
+ ----------
897
+ load_policy : str, default: "fresh"
898
+ The policy for loading the checkpoint. The following policies are supported:
899
+ - "eager": Loads the the latest available checkpoint within the namespace.
900
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
901
+ will be loaded at the start of the task.
902
+ - "none": Do not load any checkpoint
903
+ - "fresh": Loads the lastest checkpoint created within the running Task.
904
+ This mode helps loading checkpoints across various retry attempts of the same task.
905
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
906
+ created within the task will be loaded when the task is retries execution on failure.
907
+
908
+ temp_dir_root : str, default: None
909
+ The root directory under which `current.checkpoint.directory` will be created.
910
+ """
911
+ ...
912
+
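A sketch of the `fresh` load policy; `current.checkpoint.directory` is documented above, while `current.checkpoint.save()` is assumed from the checkpoint extension's usual interface rather than confirmed by this stub.

```python
import os

from metaflow import FlowSpec, current, step
from metaflow import checkpoint  # exported per this stub


class CheckpointDemoFlow(FlowSpec):

    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        # `current.checkpoint.directory` is documented above; write state into it.
        state_file = os.path.join(current.checkpoint.directory, "state.txt")
        with open(state_file, "w") as f:
            f.write("epoch=1")
        # Persisting the directory via save() is an assumption about the extension's API.
        current.checkpoint.save()
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CheckpointDemoFlow()
```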
913
+ @typing.overload
914
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
915
+ """
916
+ Specifies the PyPI packages for the step.
917
+
918
+ Information in this decorator will augment any
919
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
920
+ you can use `@pypi_base` to set packages required by all
921
+ steps and use `@pypi` to specify step-specific overrides.
922
+
923
+
924
+ Parameters
925
+ ----------
926
+ packages : Dict[str, str], default: {}
927
+ Packages to use for this step. The key is the name of the package
928
+ and the value is the version to use.
929
+ python : str, optional, default: None
930
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
931
+ that the version used will correspond to the version of the Python interpreter used to start the run.
932
+ """
933
+ ...
934
+
935
+ @typing.overload
936
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
+ ...
938
+
939
+ @typing.overload
940
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
+ ...
942
+
943
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
944
+ """
945
+ Specifies the PyPI packages for the step.
946
+
947
+ Information in this decorator will augment any
948
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
949
+ you can use `@pypi_base` to set packages required by all
950
+ steps and use `@pypi` to specify step-specific overrides.
951
+
952
+
953
+ Parameters
954
+ ----------
955
+ packages : Dict[str, str], default: {}
956
+ Packages to use for this step. The key is the name of the package
957
+ and the value is the version to use.
958
+ python : str, optional, default: None
959
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
960
+ that the version used will correspond to the version of the Python interpreter used to start the run.
961
+ """
962
+ ...
963
+
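A sketch combining `@pypi_base` with a step-level `@pypi` override, as the docstring suggests; the package pins and Python version are illustrative.

```python
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={"requests": "2.32.3"}, python="3.11.5")  # illustrative pins
class PypiDemoFlow(FlowSpec):

    # Step-specific addition on top of the flow-level packages.
    @pypi(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        import pandas as pd  # available only inside this step's environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    PypiDemoFlow()
```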
964
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
965
+ """
966
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
967
+
968
+ User code call
969
+ --------------
970
+ @vllm(
971
+ model="...",
972
+ ...
973
+ )
974
+
975
+ Valid backend options
976
+ ---------------------
977
+ - 'local': Run as a separate process on the local task machine.
978
+
979
+ Valid model options
980
+ -------------------
981
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
982
+
983
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
984
+ If you need multiple models, you must create multiple @vllm decorators.
985
+
986
+
987
+ Parameters
988
+ ----------
989
+ model: str
990
+ HuggingFace model identifier to be served by vLLM.
991
+ backend: str
992
+ Determines where and how to run the vLLM process.
993
+ debug: bool
994
+ Whether to turn on verbose debugging logs.
995
+ kwargs : Any
996
+ Any other keyword arguments are passed directly to the vLLM engine.
997
+ This allows for flexible configuration of vLLM server settings.
998
+ For example, `tensor_parallel_size=2`.
999
+ """
1000
+ ...
1001
+
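A sketch of serving a single model with `@vllm`; as noted above, one decorator serves exactly one model, and extra engine options (e.g. `tensor_parallel_size=2`) would be passed as additional keyword arguments.

```python
from metaflow import FlowSpec, step
from metaflow import vllm  # exported per this stub


class VllmSidecarFlow(FlowSpec):

    # One @vllm decorator serves exactly one model via vLLM's
    # OpenAI-compatible server for the duration of the step.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VllmSidecarFlow()
```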
1002
+ @typing.overload
1003
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
+ """
1005
+ Internal decorator to support Fast bakery
1006
+ """
1007
+ ...
1008
+
1009
+ @typing.overload
1010
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1011
+ ...
1012
+
1013
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1014
+ """
1015
+ Internal decorator to support Fast bakery
1016
+ """
1017
+ ...
1018
+
1019
+ @typing.overload
1020
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1021
+ """
1022
+ Specifies the PyPI packages for all steps of the flow.
1023
+
1024
+ Use `@pypi_base` to set common packages required by all
1025
+ steps and use `@pypi` to specify step-specific overrides.
1026
+
1027
+ Parameters
1028
+ ----------
1029
+ packages : Dict[str, str], default: {}
1030
+ Packages to use for this flow. The key is the name of the package
1031
+ and the value is the version to use.
1032
+ python : str, optional, default: None
1033
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1034
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1035
+ """
1036
+ ...
1037
+
1038
+ @typing.overload
1039
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1040
+ ...
1041
+
1042
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1043
+ """
1044
+ Specifies the PyPI packages for all steps of the flow.
1045
+
1046
+ Use `@pypi_base` to set common packages required by all
1047
+ steps and use `@pypi` to specify step-specific overrides.
1048
+
1049
+ Parameters
1050
+ ----------
1051
+ packages : Dict[str, str], default: {}
1052
+ Packages to use for this flow. The key is the name of the package
1053
+ and the value is the version to use.
1054
+ python : str, optional, default: None
1055
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1056
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1057
+ """
1058
+ ...
1059
+
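The `@pypi_base` example shown earlier after the step-level `@pypi` definition covers the flow-level form as well; a flow that only needs shared packages would simply drop the step-level `@pypi` decorator and keep the class-level `@pypi_base(packages={...})`.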
1060
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1061
+ """
1062
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1063
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1064
+
1065
+
1066
+ Parameters
1067
+ ----------
1068
+ timeout : int
1069
+ Time, in seconds before the task times out and fails. (Default: 3600)
1070
+ poke_interval : int
1071
+ Time in seconds that the job should wait in between each try. (Default: 60)
1072
+ mode : str
1073
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1074
+ exponential_backoff : bool
1075
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1076
+ pool : str
1077
+ The slot pool this task should run in;
1078
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1079
+ soft_fail : bool
1080
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1081
+ name : str
1082
+ Name of the sensor on Airflow
1083
+ description : str
1084
+ Description of sensor in the Airflow UI
1085
+ external_dag_id : str
1086
+ The dag_id that contains the task you want to wait for.
1087
+ external_task_ids : List[str]
1088
+ The list of task_ids that you want to wait for.
1089
+ If None (default value) the sensor waits for the DAG. (Default: None)
1090
+ allowed_states : List[str]
1091
+ Iterable of allowed states, (Default: ['success'])
1092
+ failed_states : List[str]
1093
+ Iterable of failed or dis-allowed states. (Default: None)
1094
+ execution_delta : datetime.timedelta
1095
+ time difference with the previous execution to look at,
1096
+ the default is the same logical date as the current task or DAG. (Default: None)
1097
+ check_existence: bool
1098
+ Set to True to check if the external task exists or check if
1099
+ the DAG to wait for exists. (Default: True)
1100
+ """
1101
+ ...
1102
+
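A sketch of gating `start` on an upstream Airflow task; it only takes effect for flows compiled with `airflow create`, the DAG and task names are hypothetical, and the omitted options are assumed to use the defaults listed above.

```python
from metaflow import FlowSpec, step
from metaflow import airflow_external_task_sensor  # exported per this stub


# Wait for the `transform` task of the `nightly_etl` DAG before `start` runs.
@airflow_external_task_sensor(
    name="wait_for_etl",
    external_dag_id="nightly_etl",
    external_task_ids=["transform"],
)
class SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorDemoFlow()
```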
1103
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1104
+ """
1105
+ Specifies what flows belong to the same project.
1106
+
1107
+ A project-specific namespace is created for all flows that
1108
+ use the same `@project(name)`.
1109
+
1110
+
1111
+ Parameters
1112
+ ----------
1113
+ name : str
1114
+ Project name. Make sure that the name is unique amongst all
1115
+ projects that use the same production scheduler. The name may
1116
+ contain only lowercase alphanumeric characters and underscores.
1117
+
1118
+ branch : Optional[str], default None
1119
+ The branch to use. If not specified, the branch is set to
1120
+ `user.<username>` unless `production` is set to `True`. This can
1121
+ also be set on the command line using `--branch` as a top-level option.
1122
+ It is an error to specify `branch` in the decorator and on the command line.
1123
+
1124
+ production : bool, default False
1125
+ Whether or not the branch is the production branch. This can also be set on the
1126
+ command line using `--production` as a top-level option. It is an error to specify
1127
+ `production` in the decorator and on the command line.
1128
+ The project branch name will be:
1129
+ - if `branch` is specified:
1130
+ - if `production` is True: `prod.<branch>`
1131
+ - if `production` is False: `test.<branch>`
1132
+ - if `branch` is not specified:
1133
+ - if `production` is True: `prod`
1134
+ - if `production` is False: `user.<username>`
1135
+ """
1136
+ ...
1137
+
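A sketch of grouping flows under one project namespace; the project name is illustrative.

```python
from metaflow import FlowSpec, project, step


# All flows sharing @project(name="fraud_detection") get a common,
# branch-aware namespace (user.<username> by default, prod in production).
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```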
1138
+ @typing.overload
1139
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1140
+ """
1141
+ Specifies the flow(s) that this flow depends on.
1142
+
1143
+ ```
1144
+ @trigger_on_finish(flow='FooFlow')
1145
+ ```
1146
+ or
1147
+ ```
1148
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1149
+ ```
1150
+ This decorator respects the @project decorator and triggers the flow
1151
+ when upstream runs within the same namespace complete successfully.
1152
+
1153
+ Additionally, you can specify project-aware upstream flow dependencies
1154
+ by specifying the fully qualified project_flow_name.
1155
+ ```
1156
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1157
+ ```
1158
+ or
1159
+ ```
1160
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1161
+ ```
1162
+
1163
+ You can also specify just the project or project branch (other values will be
1164
+ inferred from the current project or project branch):
1165
+ ```
1166
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1167
+ ```
1168
+
1169
+ Note that `branch` is typically one of:
1170
+ - `prod`
1171
+ - `user.bob`
1172
+ - `test.my_experiment`
1173
+ - `prod.staging`
1174
+
1175
+
1176
+ Parameters
1177
+ ----------
1178
+ flow : Union[str, Dict[str, str]], optional, default None
1179
+ Upstream flow dependency for this flow.
1180
+ flows : List[Union[str, Dict[str, str]]], default []
1181
+ Upstream flow dependencies for this flow.
1182
+ options : Dict[str, Any], default {}
1183
+ Backend-specific configuration for tuning eventing behavior.
1184
+ """
1185
+ ...
1186
+
1187
+ @typing.overload
1188
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1189
+ ...
1190
+
1191
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1192
+ """
1193
+ Specifies the flow(s) that this flow depends on.
1194
+
1195
+ ```
1196
+ @trigger_on_finish(flow='FooFlow')
1197
+ ```
1198
+ or
1199
+ ```
1200
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1201
+ ```
1202
+ This decorator respects the @project decorator and triggers the flow
1203
+ when upstream runs within the same namespace complete successfully.
1204
+
1205
+ Additionally, you can specify project-aware upstream flow dependencies
1206
+ by specifying the fully qualified project_flow_name.
1207
+ ```
1208
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1209
+ ```
1210
+ or
1211
+ ```
1212
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1213
+ ```
1214
+
1215
+ You can also specify just the project or project branch (other values will be
1216
+ inferred from the current project or project branch):
1217
+ ```
1218
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1219
+ ```
988
1220
 
989
- Note that all the values specified in parameters are added together so if you specify
990
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1221
+ Note that `branch` is typically one of:
1222
+ - `prod`
1223
+ - `user.bob`
1224
+ - `test.my_experiment`
1225
+ - `prod.staging`
991
1226
 
992
1227
 
993
1228
  Parameters
994
1229
  ----------
995
- seconds : int, default 0
996
- Number of seconds to wait prior to timing out.
997
- minutes : int, default 0
998
- Number of minutes to wait prior to timing out.
999
- hours : int, default 0
1000
- Number of hours to wait prior to timing out.
1230
+ flow : Union[str, Dict[str, str]], optional, default None
1231
+ Upstream flow dependency for this flow.
1232
+ flows : List[Union[str, Dict[str, str]]], default []
1233
+ Upstream flow dependencies for this flow.
1234
+ options : Dict[str, Any], default {}
1235
+ Backend-specific configuration for tuning eventing behavior.
1001
1236
  """
1002
1237
  ...
1003
1238
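A sketch of depending on another flow's successful completion; the upstream flow name is illustrative, and the trigger is assumed to take effect once the flow is deployed to a production orchestrator rather than during local runs.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Run this flow whenever FooFlow finishes successfully within the same
# (project-aware) namespace.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```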
 
1004
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1239
+ @typing.overload
1240
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1005
1241
  """
1006
- Specifies that this step is used to deploy an instance of the app.
1007
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
1242
+ Specifies the event(s) that this flow depends on.
1008
1243
 
1244
+ ```
1245
+ @trigger(event='foo')
1246
+ ```
1247
+ or
1248
+ ```
1249
+ @trigger(events=['foo', 'bar'])
1250
+ ```
1009
1251
 
1010
- Parameters
1011
- ----------
1012
- app_port : int
1013
- Number of GPUs to use.
1014
- app_name : str
1015
- Name of the app to deploy.
1016
- """
1017
- ...
1018
-
1019
- @typing.overload
1020
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1021
- """
1022
- Specifies the Conda environment for all steps of the flow.
1252
+ Additionally, you can specify the parameter mappings
1253
+ to map event payload to Metaflow parameters for the flow.
1254
+ ```
1255
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1256
+ ```
1257
+ or
1258
+ ```
1259
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1260
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1261
+ ```
1023
1262
 
1024
- Use `@conda_base` to set common libraries required by all
1025
- steps and use `@conda` to specify step-specific additions.
1263
+ 'parameters' can also be a list of strings and tuples like so:
1264
+ ```
1265
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1266
+ ```
1267
+ This is equivalent to:
1268
+ ```
1269
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1270
+ ```
1026
1271
 
1027
1272
 
1028
1273
  Parameters
1029
1274
  ----------
1030
- packages : Dict[str, str], default {}
1031
- Packages to use for this flow. The key is the name of the package
1032
- and the value is the version to use.
1033
- libraries : Dict[str, str], default {}
1034
- Supported for backward compatibility. When used with packages, packages will take precedence.
1035
- python : str, optional, default None
1036
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1037
- that the version used will correspond to the version of the Python interpreter used to start the run.
1038
- disabled : bool, default False
1039
- If set to True, disables Conda.
1275
+ event : Union[str, Dict[str, Any]], optional, default None
1276
+ Event dependency for this flow.
1277
+ events : List[Union[str, Dict[str, Any]]], default []
1278
+ Events dependency for this flow.
1279
+ options : Dict[str, Any], default {}
1280
+ Backend-specific configuration for tuning eventing behavior.
1040
1281
  """
1041
1282
  ...
1042
1283
 
1043
1284
  @typing.overload
1044
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1045
1286
  ...
1046
1287
 
1047
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1288
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1048
1289
  """
1049
- Specifies the Conda environment for all steps of the flow.
1290
+ Specifies the event(s) that this flow depends on.
1050
1291
 
1051
- Use `@conda_base` to set common libraries required by all
1052
- steps and use `@conda` to specify step-specific additions.
1292
+ ```
1293
+ @trigger(event='foo')
1294
+ ```
1295
+ or
1296
+ ```
1297
+ @trigger(events=['foo', 'bar'])
1298
+ ```
1299
+
1300
+ Additionally, you can specify the parameter mappings
1301
+ to map event payload to Metaflow parameters for the flow.
1302
+ ```
1303
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1304
+ ```
1305
+ or
1306
+ ```
1307
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1308
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1309
+ ```
1310
+
1311
+ 'parameters' can also be a list of strings and tuples like so:
1312
+ ```
1313
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1314
+ ```
1315
+ This is equivalent to:
1316
+ ```
1317
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1318
+ ```
1053
1319
 
1054
1320
 
1055
1321
  Parameters
1056
1322
  ----------
1057
- packages : Dict[str, str], default {}
1058
- Packages to use for this flow. The key is the name of the package
1059
- and the value is the version to use.
1060
- libraries : Dict[str, str], default {}
1061
- Supported for backward compatibility. When used with packages, packages will take precedence.
1062
- python : str, optional, default None
1063
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1064
- that the version used will correspond to the version of the Python interpreter used to start the run.
1065
- disabled : bool, default False
1066
- If set to True, disables Conda.
1323
+ event : Union[str, Dict[str, Any]], optional, default None
1324
+ Event dependency for this flow.
1325
+ events : List[Union[str, Dict[str, Any]]], default []
1326
+ Events dependency for this flow.
1327
+ options : Dict[str, Any], default {}
1328
+ Backend-specific configuration for tuning eventing behavior.
1067
1329
  """
1068
1330
  ...
1069
1331
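A sketch of an event trigger with a parameter mapping, mirroring the docstring's `@trigger(event={...})` form; the event and field names are illustrative.

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Start this flow when the external event 'foo' arrives, mapping the event's
# 'event_field' payload entry onto the flow parameter 'flow_param'.
@trigger(event={"name": "foo", "parameters": {"flow_param": "event_field"}})
class EventDrivenFlow(FlowSpec):

    flow_param = Parameter("flow_param", default="none")

    @step
    def start(self):
        print("triggered with", self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EventDrivenFlow()
```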
 
@@ -1190,146 +1452,45 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1190
1452
  "client_params": {
1191
1453
  "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1192
1454
  "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1193
- },
1194
- }):
1195
- with Checkpoint() as cp:
1196
- latest = cp.list(
1197
- task=run["start"].task
1198
- )[0]
1199
- print(latest)
1200
- cp.load(
1201
- latest,
1202
- "test-checkpoints"
1203
- )
1204
-
1205
- task = Task("TorchTuneFlow/8484/train/53673")
1206
- with artifact_store_from(run=run, config={
1207
- "client_params": {
1208
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1209
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1210
- },
1211
- }):
1212
- load_model(
1213
- task.data.model_ref,
1214
- "test-models"
1215
- )
1216
- ```
1217
- Parameters:
1218
- ----------
1219
-
1220
- type: str
1221
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1222
-
1223
- config: dict or Callable
1224
- Dictionary of configuration options for the datastore. The following keys are required:
1225
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1226
- - example: 's3://bucket-name/path/to/root'
1227
- - example: 'gs://bucket-name/path/to/root'
1228
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1229
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1230
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1231
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1232
- """
1233
- ...
1234
-
1235
- @typing.overload
1236
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1237
- """
1238
- Specifies the flow(s) that this flow depends on.
1239
-
1240
- ```
1241
- @trigger_on_finish(flow='FooFlow')
1242
- ```
1243
- or
1244
- ```
1245
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1246
- ```
1247
- This decorator respects the @project decorator and triggers the flow
1248
- when upstream runs within the same namespace complete successfully
1249
-
1250
- Additionally, you can specify project aware upstream flow dependencies
1251
- by specifying the fully qualified project_flow_name.
1252
- ```
1253
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1254
- ```
1255
- or
1256
- ```
1257
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1258
- ```
1259
-
1260
- You can also specify just the project or project branch (other values will be
1261
- inferred from the current project or project branch):
1262
- ```
1263
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1264
- ```
1265
-
1266
- Note that `branch` is typically one of:
1267
- - `prod`
1268
- - `user.bob`
1269
- - `test.my_experiment`
1270
- - `prod.staging`
1271
-
1272
-
1273
- Parameters
1274
- ----------
1275
- flow : Union[str, Dict[str, str]], optional, default None
1276
- Upstream flow dependency for this flow.
1277
- flows : List[Union[str, Dict[str, str]]], default []
1278
- Upstream flow dependencies for this flow.
1279
- options : Dict[str, Any], default {}
1280
- Backend-specific configuration for tuning eventing behavior.
1281
- """
1282
- ...
1283
-
1284
- @typing.overload
1285
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1286
- ...
1287
-
1288
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1289
- """
1290
- Specifies the flow(s) that this flow depends on.
1291
-
1292
- ```
1293
- @trigger_on_finish(flow='FooFlow')
1294
- ```
1295
- or
1296
- ```
1297
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1298
- ```
1299
- This decorator respects the @project decorator and triggers the flow
1300
- when upstream runs within the same namespace complete successfully
1301
-
1302
- Additionally, you can specify project aware upstream flow dependencies
1303
- by specifying the fully qualified project_flow_name.
1304
- ```
1305
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1306
- ```
1307
- or
1308
- ```
1309
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1310
- ```
1311
-
1312
- You can also specify just the project or project branch (other values will be
1313
- inferred from the current project or project branch):
1314
- ```
1315
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1316
- ```
1455
+ },
1456
+ }):
1457
+ with Checkpoint() as cp:
1458
+ latest = cp.list(
1459
+ task=run["start"].task
1460
+ )[0]
1461
+ print(latest)
1462
+ cp.load(
1463
+ latest,
1464
+ "test-checkpoints"
1465
+ )
1317
1466
 
1318
- Note that `branch` is typically one of:
1319
- - `prod`
1320
- - `user.bob`
1321
- - `test.my_experiment`
1322
- - `prod.staging`
1467
+ task = Task("TorchTuneFlow/8484/train/53673")
1468
+ with artifact_store_from(run=run, config={
1469
+ "client_params": {
1470
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1471
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1472
+ },
1473
+ }):
1474
+ load_model(
1475
+ task.data.model_ref,
1476
+ "test-models"
1477
+ )
1478
+ ```
1479
+ Parameters:
1480
+ ----------
1323
1481
 
1482
+ type: str
1483
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1324
1484
 
1325
- Parameters
1326
- ----------
1327
- flow : Union[str, Dict[str, str]], optional, default None
1328
- Upstream flow dependency for this flow.
1329
- flows : List[Union[str, Dict[str, str]]], default []
1330
- Upstream flow dependencies for this flow.
1331
- options : Dict[str, Any], default {}
1332
- Backend-specific configuration for tuning eventing behavior.
1485
+ config: dict or Callable
1486
+ Dictionary of configuration options for the datastore. The following keys are required:
1487
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1488
+ - example: 's3://bucket-name/path/to/root'
1489
+ - example: 'gs://bucket-name/path/to/root'
1490
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1491
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1492
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1493
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1333
1494
  """
1334
1495
  ...
1335
1496
 
@@ -1376,215 +1537,54 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1376
1537
  """
1377
1538
  ...
1378
1539
 
1379
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1380
- """
1381
- Specifies what flows belong to the same project.
1382
-
1383
- A project-specific namespace is created for all flows that
1384
- use the same `@project(name)`.
1385
-
1386
-
1387
- Parameters
1388
- ----------
1389
- name : str
1390
- Project name. Make sure that the name is unique amongst all
1391
- projects that use the same production scheduler. The name may
1392
- contain only lowercase alphanumeric characters and underscores.
1393
-
1394
- branch : Optional[str], default None
1395
- The branch to use. If not specified, the branch is set to
1396
- `user.<username>` unless `production` is set to `True`. This can
1397
- also be set on the command line using `--branch` as a top-level option.
1398
- It is an error to specify `branch` in the decorator and on the command line.
1399
-
1400
- production : bool, default False
1401
- Whether or not the branch is the production branch. This can also be set on the
1402
- command line using `--production` as a top-level option. It is an error to specify
1403
- `production` in the decorator and on the command line.
1404
- The project branch name will be:
1405
- - if `branch` is specified:
1406
- - if `production` is True: `prod.<branch>`
1407
- - if `production` is False: `test.<branch>`
1408
- - if `branch` is not specified:
1409
- - if `production` is True: `prod`
1410
- - if `production` is False: `user.<username>`
1411
- """
1412
- ...
1413
-
1414
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1415
- """
1416
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1417
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1418
-
1419
-
1420
- Parameters
1421
- ----------
1422
- timeout : int
1423
- Time, in seconds before the task times out and fails. (Default: 3600)
1424
- poke_interval : int
1425
- Time in seconds that the job should wait in between each try. (Default: 60)
1426
- mode : str
1427
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1428
- exponential_backoff : bool
1429
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1430
- pool : str
1431
- the slot pool this task should run in,
1432
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1433
- soft_fail : bool
1434
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1435
- name : str
1436
- Name of the sensor on Airflow
1437
- description : str
1438
- Description of sensor in the Airflow UI
1439
- external_dag_id : str
1440
- The dag_id that contains the task you want to wait for.
1441
- external_task_ids : List[str]
1442
- The list of task_ids that you want to wait for.
1443
- If None (default value) the sensor waits for the DAG. (Default: None)
1444
- allowed_states : List[str]
1445
- Iterable of allowed states, (Default: ['success'])
1446
- failed_states : List[str]
1447
- Iterable of failed or dis-allowed states. (Default: None)
1448
- execution_delta : datetime.timedelta
1449
- time difference with the previous execution to look at,
1450
- the default is the same logical date as the current task or DAG. (Default: None)
1451
- check_existence: bool
1452
- Set to True to check if the external task exists or check if
1453
- the DAG to wait for exists. (Default: True)
1454
- """
1455
- ...
1456
-
1457
1540
  @typing.overload
1458
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1541
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1459
1542
  """
1460
- Specifies the PyPI packages for all steps of the flow.
1543
+ Specifies the Conda environment for all steps of the flow.
1544
+
1545
+ Use `@conda_base` to set common libraries required by all
1546
+ steps and use `@conda` to specify step-specific additions.
1461
1547
 
1462
- Use `@pypi_base` to set common packages required by all
1463
- steps and use `@pypi` to specify step-specific overrides.
1464
1548
 
1465
1549
  Parameters
1466
1550
  ----------
1467
- packages : Dict[str, str], default: {}
1551
+ packages : Dict[str, str], default {}
1468
1552
  Packages to use for this flow. The key is the name of the package
1469
1553
  and the value is the version to use.
1470
- python : str, optional, default: None
1554
+ libraries : Dict[str, str], default {}
1555
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1556
+ python : str, optional, default None
1471
1557
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1472
1558
  that the version used will correspond to the version of the Python interpreter used to start the run.
1559
+ disabled : bool, default False
1560
+ If set to True, disables Conda.
1473
1561
  """
1474
1562
  ...
1475
1563
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
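The removed overloads above carry the `@trigger` docstring, including its event-to-parameter mapping forms. A minimal sketch of the dict-mapping form, with made-up event and field names, might look like:

```
# Hypothetical flow triggered by an external event; the event name
# 'data_ready' and its 'table_name' payload field are placeholders.
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={"name": "data_ready", "parameters": {"table": "table_name"}})
class RefreshFlow(FlowSpec):
    # 'table' is filled from the triggering event's 'table_name' field.
    table = Parameter("table", default="raw.events")

    @step
    def start(self):
        print(f"refreshing {self.table}")
        self.next(self.end)

    @step
    def end(self):
        pass
```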