ob-metaflow-stubs 6.0.3.169__py2.py3-none-any.whl → 6.0.3.170__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (206) hide show
  1. metaflow-stubs/__init__.pyi +656 -656
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +113 -113
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  80. metaflow-stubs/multicore_utils.pyi +1 -1
  81. metaflow-stubs/parameters.pyi +3 -3
  82. metaflow-stubs/plugins/__init__.pyi +11 -11
  83. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  84. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  85. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  86. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  87. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  88. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  89. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  90. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  91. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  92. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  93. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  94. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
  95. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  96. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  97. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  98. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  99. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  100. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  101. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  102. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  103. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  104. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  105. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  106. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  107. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  108. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  109. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  110. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  111. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  112. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  113. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  115. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  116. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  117. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  118. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  119. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  120. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  121. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  122. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  123. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  124. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  125. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  126. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  127. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  128. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  129. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  131. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  133. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  134. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  135. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  136. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  137. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  138. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  140. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  141. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  142. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  143. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  144. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  145. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  146. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  147. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  148. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  151. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  152. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  153. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  154. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  155. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  156. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  157. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  158. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  160. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  161. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  163. metaflow-stubs/plugins/perimeters.pyi +1 -1
  164. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  165. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  166. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  167. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  168. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  169. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  170. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  171. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  172. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  173. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  176. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  177. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  178. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  179. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  181. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  183. metaflow-stubs/profilers/__init__.pyi +1 -1
  184. metaflow-stubs/pylint_wrapper.pyi +1 -1
  185. metaflow-stubs/runner/__init__.pyi +1 -1
  186. metaflow-stubs/runner/deployer.pyi +28 -28
  187. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  188. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  189. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  190. metaflow-stubs/runner/nbrun.pyi +1 -1
  191. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  192. metaflow-stubs/runner/utils.pyi +2 -2
  193. metaflow-stubs/system/__init__.pyi +1 -1
  194. metaflow-stubs/system/system_logger.pyi +1 -1
  195. metaflow-stubs/system/system_monitor.pyi +1 -1
  196. metaflow-stubs/tagging_util.pyi +1 -1
  197. metaflow-stubs/tuple_util.pyi +1 -1
  198. metaflow-stubs/user_configs/__init__.pyi +1 -1
  199. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  200. metaflow-stubs/user_configs/config_options.pyi +2 -2
  201. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  202. {ob_metaflow_stubs-6.0.3.169.dist-info → ob_metaflow_stubs-6.0.3.170.dist-info}/METADATA +1 -1
  203. ob_metaflow_stubs-6.0.3.170.dist-info/RECORD +206 -0
  204. ob_metaflow_stubs-6.0.3.169.dist-info/RECORD +0 -206
  205. {ob_metaflow_stubs-6.0.3.169.dist-info → ob_metaflow_stubs-6.0.3.170.dist-info}/WHEEL +0 -0
  206. {ob_metaflow_stubs-6.0.3.169.dist-info → ob_metaflow_stubs-6.0.3.170.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
4
- # Generated on 2025-05-21T14:02:14.742508 #
4
+ # Generated on 2025-05-22T07:40:40.319392 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -37,16 +37,16 @@ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDec
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
38
  from . import tuple_util as tuple_util
39
39
  from . import cards as cards
40
- from . import metaflow_git as metaflow_git
41
40
  from . import events as events
41
+ from . import metaflow_git as metaflow_git
42
42
  from . import runner as runner
43
43
  from . import plugins as plugins
44
44
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
45
45
  from . import includefile as includefile
46
46
  from .includefile import IncludeFile as IncludeFile
47
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
47
48
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
48
49
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
49
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
50
50
  from . import client as client
51
51
  from .client.core import namespace as namespace
52
52
  from .client.core import get_namespace as get_namespace
@@ -153,54 +153,126 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
153
153
  """
154
154
  ...
155
155
 
156
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
157
+ """
158
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
159
+
160
+
161
+ Parameters
162
+ ----------
163
+ temp_dir_root : str, optional
164
+ The root directory that will hold the temporary directory where objects will be downloaded.
165
+
166
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
167
+ The list of repos (models/datasets) to load.
168
+
169
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
170
+
171
+ - If repo (model/dataset) is not found in the datastore:
172
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
173
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
174
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
175
+
176
+ - If repo is found in the datastore:
177
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
178
+ """
179
+ ...
180
+
156
181
  @typing.overload
157
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
182
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
158
183
  """
159
- Specifies that the step will success under all circumstances.
184
+ Specifies environment variables to be set prior to the execution of a step.
160
185
 
161
- The decorator will create an optional artifact, specified by `var`, which
162
- contains the exception raised. You can use it to detect the presence
163
- of errors, indicating that all happy-path artifacts produced by the step
164
- are missing.
186
+
187
+ Parameters
188
+ ----------
189
+ vars : Dict[str, str], default {}
190
+ Dictionary of environment variables to set.
191
+ """
192
+ ...
193
+
194
+ @typing.overload
195
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
196
+ ...
197
+
198
+ @typing.overload
199
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
200
+ ...
201
+
202
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
203
+ """
204
+ Specifies environment variables to be set prior to the execution of a step.
165
205
 
166
206
 
167
207
  Parameters
168
208
  ----------
169
- var : str, optional, default None
170
- Name of the artifact in which to store the caught exception.
171
- If not specified, the exception is not stored.
172
- print_exception : bool, default True
173
- Determines whether or not the exception is printed to
174
- stdout when caught.
209
+ vars : Dict[str, str], default {}
210
+ Dictionary of environment variables to set.
175
211
  """
176
212
  ...
177
213
 
178
214
  @typing.overload
179
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
215
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
216
+ """
217
+ Creates a human-readable report, a Metaflow Card, after this step completes.
218
+
219
+ Note that you may add multiple `@card` decorators in a step with different parameters.
220
+
221
+
222
+ Parameters
223
+ ----------
224
+ type : str, default 'default'
225
+ Card type.
226
+ id : str, optional, default None
227
+ If multiple cards are present, use this id to identify this card.
228
+ options : Dict[str, Any], default {}
229
+ Options passed to the card. The contents depend on the card type.
230
+ timeout : int, default 45
231
+ Interrupt reporting if it takes more than this many seconds.
232
+ """
180
233
  ...
181
234
 
182
235
  @typing.overload
183
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
236
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
184
237
  ...
185
238
 
186
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
239
+ @typing.overload
240
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
241
+ ...
242
+
243
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
187
244
  """
188
- Specifies that the step will success under all circumstances.
245
+ Creates a human-readable report, a Metaflow Card, after this step completes.
189
246
 
190
- The decorator will create an optional artifact, specified by `var`, which
191
- contains the exception raised. You can use it to detect the presence
192
- of errors, indicating that all happy-path artifacts produced by the step
193
- are missing.
247
+ Note that you may add multiple `@card` decorators in a step with different parameters.
194
248
 
195
249
 
196
250
  Parameters
197
251
  ----------
198
- var : str, optional, default None
199
- Name of the artifact in which to store the caught exception.
200
- If not specified, the exception is not stored.
201
- print_exception : bool, default True
202
- Determines whether or not the exception is printed to
203
- stdout when caught.
252
+ type : str, default 'default'
253
+ Card type.
254
+ id : str, optional, default None
255
+ If multiple cards are present, use this id to identify this card.
256
+ options : Dict[str, Any], default {}
257
+ Options passed to the card. The contents depend on the card type.
258
+ timeout : int, default 45
259
+ Interrupt reporting if it takes more than this many seconds.
260
+ """
261
+ ...
262
+
263
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
264
+ """
265
+ Specifies that this step should execute on DGX cloud.
266
+
267
+
268
+ Parameters
269
+ ----------
270
+ gpu : int
271
+ Number of GPUs to use.
272
+ gpu_type : str
273
+ Type of Nvidia GPU to use.
274
+ queue_timeout : int
275
+ Time to keep the job in NVCF's queue.
204
276
  """
205
277
  ...
206
278
 
@@ -261,88 +333,38 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
261
333
  """
262
334
  ...
263
335
 
264
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
336
+ @typing.overload
337
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
265
338
  """
266
- Specifies that this step should execute on Kubernetes.
339
+ Specifies secrets to be retrieved and injected as environment variables prior to
340
+ the execution of a step.
267
341
 
268
342
 
269
343
  Parameters
270
344
  ----------
271
- cpu : int, default 1
272
- Number of CPUs required for this step. If `@resources` is
273
- also present, the maximum value from all decorators is used.
274
- memory : int, default 4096
275
- Memory size (in MB) required for this step. If
276
- `@resources` is also present, the maximum value from all decorators is
277
- used.
278
- disk : int, default 10240
279
- Disk size (in MB) required for this step. If
280
- `@resources` is also present, the maximum value from all decorators is
281
- used.
282
- image : str, optional, default None
283
- Docker image to use when launching on Kubernetes. If not specified, and
284
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
285
- not, a default Docker image mapping to the current version of Python is used.
286
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
287
- If given, the imagePullPolicy to be applied to the Docker image of the step.
288
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
289
- Kubernetes service account to use when launching pod in Kubernetes.
290
- secrets : List[str], optional, default None
291
- Kubernetes secrets to use when launching pod in Kubernetes. These
292
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
293
- in Metaflow configuration.
294
- node_selector: Union[Dict[str,str], str], optional, default None
295
- Kubernetes node selector(s) to apply to the pod running the task.
296
- Can be passed in as a comma separated string of values e.g.
297
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
298
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
299
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
300
- Kubernetes namespace to use when launching pod in Kubernetes.
301
- gpu : int, optional, default None
302
- Number of GPUs required for this step. A value of zero implies that
303
- the scheduled node should not have GPUs.
304
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
305
- The vendor of the GPUs to be used for this step.
306
- tolerations : List[str], default []
307
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
308
- Kubernetes tolerations to use when launching pod in Kubernetes.
309
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
310
- Kubernetes labels to use when launching pod in Kubernetes.
311
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
312
- Kubernetes annotations to use when launching pod in Kubernetes.
313
- use_tmpfs : bool, default False
314
- This enables an explicit tmpfs mount for this step.
315
- tmpfs_tempdir : bool, default True
316
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
317
- tmpfs_size : int, optional, default: None
318
- The value for the size (in MiB) of the tmpfs mount for this step.
319
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
320
- memory allocated for this step.
321
- tmpfs_path : str, optional, default /metaflow_temp
322
- Path to tmpfs mount for this step.
323
- persistent_volume_claims : Dict[str, str], optional, default None
324
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
325
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
326
- shared_memory: int, optional
327
- Shared memory size (in MiB) required for this step
328
- port: int, optional
329
- Port number to specify in the Kubernetes job object
330
- compute_pool : str, optional, default None
331
- Compute pool to be used for for this step.
332
- If not specified, any accessible compute pool within the perimeter is used.
333
- hostname_resolution_timeout: int, default 10 * 60
334
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
335
- Only applicable when @parallel is used.
336
- qos: str, default: Burstable
337
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
345
+ sources : List[Union[str, Dict[str, Any]]], default: []
346
+ List of secret specs, defining how the secrets are to be retrieved
347
+ """
348
+ ...
349
+
350
+ @typing.overload
351
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
352
+ ...
353
+
354
+ @typing.overload
355
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
356
+ ...
357
+
358
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
359
+ """
360
+ Specifies secrets to be retrieved and injected as environment variables prior to
361
+ the execution of a step.
338
362
 
339
- security_context: Dict[str, Any], optional, default None
340
- Container security context. Applies to the task container. Allows the following keys:
341
- - privileged: bool, optional, default None
342
- - allow_privilege_escalation: bool, optional, default None
343
- - run_as_user: int, optional, default None
344
- - run_as_group: int, optional, default None
345
- - run_as_non_root: bool, optional, default None
363
+
364
+ Parameters
365
+ ----------
366
+ sources : List[Union[str, Dict[str, Any]]], default: []
367
+ List of secret specs, defining how the secrets are to be retrieved
346
368
  """
347
369
  ...
348
370
 
@@ -405,49 +427,10 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
405
427
  """
406
428
  ...
407
429
 
408
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
430
+ @typing.overload
431
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
409
432
  """
410
- Decorator that helps cache, version and store models/datasets from huggingface hub.
411
-
412
-
413
- Parameters
414
- ----------
415
- temp_dir_root : str, optional
416
- The root directory that will hold the temporary directory where objects will be downloaded.
417
-
418
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
419
- The list of repos (models/datasets) to load.
420
-
421
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
422
-
423
- - If repo (model/dataset) is not found in the datastore:
424
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
425
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
426
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
427
-
428
- - If repo is found in the datastore:
429
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
430
- """
431
- ...
432
-
433
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
434
- """
435
- Specifies that this step should execute on DGX cloud.
436
-
437
-
438
- Parameters
439
- ----------
440
- gpu : int
441
- Number of GPUs to use.
442
- gpu_type : str
443
- Type of Nvidia GPU to use.
444
- """
445
- ...
446
-
447
- @typing.overload
448
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
449
- """
450
- Internal decorator to support Fast bakery
433
+ Internal decorator to support Fast bakery
451
434
  """
452
435
  ...
453
436
 
@@ -461,167 +444,6 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
461
444
  """
462
445
  ...
463
446
 
464
- @typing.overload
465
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
466
- """
467
- Specifies the resources needed when executing this step.
468
-
469
- Use `@resources` to specify the resource requirements
470
- independently of the specific compute layer (`@batch`, `@kubernetes`).
471
-
472
- You can choose the compute layer on the command line by executing e.g.
473
- ```
474
- python myflow.py run --with batch
475
- ```
476
- or
477
- ```
478
- python myflow.py run --with kubernetes
479
- ```
480
- which executes the flow on the desired system using the
481
- requirements specified in `@resources`.
482
-
483
-
484
- Parameters
485
- ----------
486
- cpu : int, default 1
487
- Number of CPUs required for this step.
488
- gpu : int, optional, default None
489
- Number of GPUs required for this step.
490
- disk : int, optional, default None
491
- Disk size (in MB) required for this step. Only applies on Kubernetes.
492
- memory : int, default 4096
493
- Memory size (in MB) required for this step.
494
- shared_memory : int, optional, default None
495
- The value for the size (in MiB) of the /dev/shm volume for this step.
496
- This parameter maps to the `--shm-size` option in Docker.
497
- """
498
- ...
499
-
500
- @typing.overload
501
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
502
- ...
503
-
504
- @typing.overload
505
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
506
- ...
507
-
508
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
509
- """
510
- Specifies the resources needed when executing this step.
511
-
512
- Use `@resources` to specify the resource requirements
513
- independently of the specific compute layer (`@batch`, `@kubernetes`).
514
-
515
- You can choose the compute layer on the command line by executing e.g.
516
- ```
517
- python myflow.py run --with batch
518
- ```
519
- or
520
- ```
521
- python myflow.py run --with kubernetes
522
- ```
523
- which executes the flow on the desired system using the
524
- requirements specified in `@resources`.
525
-
526
-
527
- Parameters
528
- ----------
529
- cpu : int, default 1
530
- Number of CPUs required for this step.
531
- gpu : int, optional, default None
532
- Number of GPUs required for this step.
533
- disk : int, optional, default None
534
- Disk size (in MB) required for this step. Only applies on Kubernetes.
535
- memory : int, default 4096
536
- Memory size (in MB) required for this step.
537
- shared_memory : int, optional, default None
538
- The value for the size (in MiB) of the /dev/shm volume for this step.
539
- This parameter maps to the `--shm-size` option in Docker.
540
- """
541
- ...
542
-
543
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
544
- """
545
- Specifies that this step should execute on DGX cloud.
546
-
547
-
548
- Parameters
549
- ----------
550
- gpu : int
551
- Number of GPUs to use.
552
- gpu_type : str
553
- Type of Nvidia GPU to use.
554
- queue_timeout : int
555
- Time to keep the job in NVCF's queue.
556
- """
557
- ...
558
-
559
- @typing.overload
560
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
561
- """
562
- Specifies environment variables to be set prior to the execution of a step.
563
-
564
-
565
- Parameters
566
- ----------
567
- vars : Dict[str, str], default {}
568
- Dictionary of environment variables to set.
569
- """
570
- ...
571
-
572
- @typing.overload
573
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
574
- ...
575
-
576
- @typing.overload
577
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
578
- ...
579
-
580
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
581
- """
582
- Specifies environment variables to be set prior to the execution of a step.
583
-
584
-
585
- Parameters
586
- ----------
587
- vars : Dict[str, str], default {}
588
- Dictionary of environment variables to set.
589
- """
590
- ...
591
-
592
- def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
593
- """
594
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
595
-
596
- User code call
597
- -----------
598
- @ollama(
599
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
600
- backend='local'
601
- )
602
-
603
- Valid backend options
604
- ---------------------
605
- - 'local': Run as a separate process on the local task machine.
606
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
607
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
608
-
609
- Valid model options
610
- ----------------
611
- - 'llama3.2'
612
- - 'llama3.3'
613
- - any model here https://ollama.com/search
614
-
615
-
616
- Parameters
617
- ----------
618
- models: list[Ollama]
619
- List of Ollama containers running models in sidecars.
620
- backend: str
621
- Determines where and how to run the Ollama process.
622
- """
623
- ...
624
-
625
447
  @typing.overload
626
448
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
627
449
  """
@@ -663,162 +485,72 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
663
485
  Information in this decorator will augment any
664
486
  attributes set in the `@conda_base` flow-level decorator. Hence,
665
487
  you can use `@conda_base` to set packages required by all
666
- steps and use `@conda` to specify step-specific overrides.
667
-
668
-
669
- Parameters
670
- ----------
671
- packages : Dict[str, str], default {}
672
- Packages to use for this step. The key is the name of the package
673
- and the value is the version to use.
674
- libraries : Dict[str, str], default {}
675
- Supported for backward compatibility. When used with packages, packages will take precedence.
676
- python : str, optional, default None
677
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
678
- that the version used will correspond to the version of the Python interpreter used to start the run.
679
- disabled : bool, default False
680
- If set to True, disables @conda.
681
- """
682
- ...
683
-
684
- @typing.overload
685
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
686
- """
687
- Specifies secrets to be retrieved and injected as environment variables prior to
688
- the execution of a step.
689
-
690
-
691
- Parameters
692
- ----------
693
- sources : List[Union[str, Dict[str, Any]]], default: []
694
- List of secret specs, defining how the secrets are to be retrieved
695
- """
696
- ...
697
-
698
- @typing.overload
699
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
700
- ...
701
-
702
- @typing.overload
703
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
704
- ...
705
-
706
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
707
- """
708
- Specifies secrets to be retrieved and injected as environment variables prior to
709
- the execution of a step.
710
-
711
-
712
- Parameters
713
- ----------
714
- sources : List[Union[str, Dict[str, Any]]], default: []
715
- List of secret specs, defining how the secrets are to be retrieved
716
- """
717
- ...
718
-
719
- @typing.overload
720
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
721
- """
722
- Enables checkpointing for a step.
723
-
724
-
725
-
726
- Parameters
727
- ----------
728
- load_policy : str, default: "fresh"
729
- The policy for loading the checkpoint. The following policies are supported:
730
- - "eager": Loads the the latest available checkpoint within the namespace.
731
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
732
- will be loaded at the start of the task.
733
- - "none": Do not load any checkpoint
734
- - "fresh": Loads the lastest checkpoint created within the running Task.
735
- This mode helps loading checkpoints across various retry attempts of the same task.
736
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
737
- created within the task will be loaded when the task is retries execution on failure.
738
-
739
- temp_dir_root : str, default: None
740
- The root directory under which `current.checkpoint.directory` will be created.
741
- """
742
- ...
743
-
744
- @typing.overload
745
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
746
- ...
747
-
748
- @typing.overload
749
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
750
- ...
751
-
752
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
753
- """
754
- Enables checkpointing for a step.
755
-
756
-
757
-
758
- Parameters
759
- ----------
760
- load_policy : str, default: "fresh"
761
- The policy for loading the checkpoint. The following policies are supported:
762
- - "eager": Loads the the latest available checkpoint within the namespace.
763
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
764
- will be loaded at the start of the task.
765
- - "none": Do not load any checkpoint
766
- - "fresh": Loads the lastest checkpoint created within the running Task.
767
- This mode helps loading checkpoints across various retry attempts of the same task.
768
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
769
- created within the task will be loaded when the task is retries execution on failure.
488
+ steps and use `@conda` to specify step-specific overrides.
770
489
 
771
- temp_dir_root : str, default: None
772
- The root directory under which `current.checkpoint.directory` will be created.
490
+
491
+ Parameters
492
+ ----------
493
+ packages : Dict[str, str], default {}
494
+ Packages to use for this step. The key is the name of the package
495
+ and the value is the version to use.
496
+ libraries : Dict[str, str], default {}
497
+ Supported for backward compatibility. When used with packages, packages will take precedence.
498
+ python : str, optional, default None
499
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
500
+ that the version used will correspond to the version of the Python interpreter used to start the run.
501
+ disabled : bool, default False
502
+ If set to True, disables @conda.
773
503
  """
774
504
  ...
775
505
 
776
506
  @typing.overload
777
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
507
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
778
508
  """
779
- Creates a human-readable report, a Metaflow Card, after this step completes.
509
+ Specifies that the step will success under all circumstances.
780
510
 
781
- Note that you may add multiple `@card` decorators in a step with different parameters.
511
+ The decorator will create an optional artifact, specified by `var`, which
512
+ contains the exception raised. You can use it to detect the presence
513
+ of errors, indicating that all happy-path artifacts produced by the step
514
+ are missing.
782
515
 
783
516
 
784
517
  Parameters
785
518
  ----------
786
- type : str, default 'default'
787
- Card type.
788
- id : str, optional, default None
789
- If multiple cards are present, use this id to identify this card.
790
- options : Dict[str, Any], default {}
791
- Options passed to the card. The contents depend on the card type.
792
- timeout : int, default 45
793
- Interrupt reporting if it takes more than this many seconds.
519
+ var : str, optional, default None
520
+ Name of the artifact in which to store the caught exception.
521
+ If not specified, the exception is not stored.
522
+ print_exception : bool, default True
523
+ Determines whether or not the exception is printed to
524
+ stdout when caught.
794
525
  """
795
526
  ...
796
527
 
797
528
  @typing.overload
798
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
529
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
799
530
  ...
800
531
 
801
532
  @typing.overload
802
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
533
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
803
534
  ...
804
535
 
805
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
536
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
806
537
  """
807
- Creates a human-readable report, a Metaflow Card, after this step completes.
538
+ Specifies that the step will success under all circumstances.
808
539
 
809
- Note that you may add multiple `@card` decorators in a step with different parameters.
540
+ The decorator will create an optional artifact, specified by `var`, which
541
+ contains the exception raised. You can use it to detect the presence
542
+ of errors, indicating that all happy-path artifacts produced by the step
543
+ are missing.
810
544
 
811
545
 
812
546
  Parameters
813
547
  ----------
814
- type : str, default 'default'
815
- Card type.
816
- id : str, optional, default None
817
- If multiple cards are present, use this id to identify this card.
818
- options : Dict[str, Any], default {}
819
- Options passed to the card. The contents depend on the card type.
820
- timeout : int, default 45
821
- Interrupt reporting if it takes more than this many seconds.
548
+ var : str, optional, default None
549
+ Name of the artifact in which to store the caught exception.
550
+ If not specified, the exception is not stored.
551
+ print_exception : bool, default True
552
+ Determines whether or not the exception is printed to
553
+ stdout when caught.
822
554
  """
823
555
  ...
824
556
 
@@ -928,6 +660,132 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
928
660
  """
929
661
  ...
930
662
 
663
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
664
+ """
665
+ Specifies that this step should execute on DGX cloud.
666
+
667
+
668
+ Parameters
669
+ ----------
670
+ gpu : int
671
+ Number of GPUs to use.
672
+ gpu_type : str
673
+ Type of Nvidia GPU to use.
674
+ """
675
+ ...
676
+
677
+ def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
678
+ """
679
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
680
+
681
+ User code call
682
+ -----------
683
+ @ollama(
684
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
685
+ backend='local'
686
+ )
687
+
688
+ Valid backend options
689
+ ---------------------
690
+ - 'local': Run as a separate process on the local task machine.
691
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
692
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
693
+
694
+ Valid model options
695
+ ----------------
696
+ - 'llama3.2'
697
+ - 'llama3.3'
698
+ - any model here https://ollama.com/search
699
+
700
+
701
+ Parameters
702
+ ----------
703
+ models: list[Ollama]
704
+ List of Ollama containers running models in sidecars.
705
+ backend: str
706
+ Determines where and how to run the Ollama process.
707
+ """
708
+ ...
709
+
710
+ @typing.overload
711
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
712
+ """
713
+ Specifies the resources needed when executing this step.
714
+
715
+ Use `@resources` to specify the resource requirements
716
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
717
+
718
+ You can choose the compute layer on the command line by executing e.g.
719
+ ```
720
+ python myflow.py run --with batch
721
+ ```
722
+ or
723
+ ```
724
+ python myflow.py run --with kubernetes
725
+ ```
726
+ which executes the flow on the desired system using the
727
+ requirements specified in `@resources`.
728
+
729
+
730
+ Parameters
731
+ ----------
732
+ cpu : int, default 1
733
+ Number of CPUs required for this step.
734
+ gpu : int, optional, default None
735
+ Number of GPUs required for this step.
736
+ disk : int, optional, default None
737
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
738
+ memory : int, default 4096
739
+ Memory size (in MB) required for this step.
740
+ shared_memory : int, optional, default None
741
+ The value for the size (in MiB) of the /dev/shm volume for this step.
742
+ This parameter maps to the `--shm-size` option in Docker.
743
+ """
744
+ ...
745
+
746
+ @typing.overload
747
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
748
+ ...
749
+
750
+ @typing.overload
751
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
752
+ ...
753
+
754
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
755
+ """
756
+ Specifies the resources needed when executing this step.
757
+
758
+ Use `@resources` to specify the resource requirements
759
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
760
+
761
+ You can choose the compute layer on the command line by executing e.g.
762
+ ```
763
+ python myflow.py run --with batch
764
+ ```
765
+ or
766
+ ```
767
+ python myflow.py run --with kubernetes
768
+ ```
769
+ which executes the flow on the desired system using the
770
+ requirements specified in `@resources`.
771
+
772
+
773
+ Parameters
774
+ ----------
775
+ cpu : int, default 1
776
+ Number of CPUs required for this step.
777
+ gpu : int, optional, default None
778
+ Number of GPUs required for this step.
779
+ disk : int, optional, default None
780
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
781
+ memory : int, default 4096
782
+ Memory size (in MB) required for this step.
783
+ shared_memory : int, optional, default None
784
+ The value for the size (in MiB) of the /dev/shm volume for this step.
785
+ This parameter maps to the `--shm-size` option in Docker.
786
+ """
787
+ ...
788
+
931
789
  @typing.overload
932
790
  def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
933
791
  """
@@ -947,18 +805,160 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
947
805
  """
948
806
  ...
949
807
 
950
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
808
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
809
+ """
810
+ Specifies that this step is used to deploy an instance of the app.
811
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
812
+
813
+
814
+ Parameters
815
+ ----------
816
+ app_port : int
817
+ Port number on which the app will be served.
818
+ app_name : str
819
+ Name of the app to deploy.
820
+ """
821
+ ...
822
+
823
+ @typing.overload
824
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
825
+ """
826
+ Enables checkpointing for a step.
827
+
828
+
829
+
830
+ Parameters
831
+ ----------
832
+ load_policy : str, default: "fresh"
833
+ The policy for loading the checkpoint. The following policies are supported:
834
+ - "eager": Loads the latest available checkpoint within the namespace.
835
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
836
+ will be loaded at the start of the task.
837
+ - "none": Do not load any checkpoint
838
+ - "fresh": Loads the latest checkpoint created within the running Task.
839
+ This mode helps loading checkpoints across various retry attempts of the same task.
840
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
841
+ created within the task will be loaded when the task retries execution on failure.
842
+
843
+ temp_dir_root : str, default: None
844
+ The root directory under which `current.checkpoint.directory` will be created.
845
+ """
846
+ ...
847
+
848
+ @typing.overload
849
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
850
+ ...
851
+
852
+ @typing.overload
853
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
854
+ ...
855
+
856
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
857
+ """
858
+ Enables checkpointing for a step.
859
+
860
+
861
+
862
+ Parameters
863
+ ----------
864
+ load_policy : str, default: "fresh"
865
+ The policy for loading the checkpoint. The following policies are supported:
866
+ - "eager": Loads the latest available checkpoint within the namespace.
867
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
868
+ will be loaded at the start of the task.
869
+ - "none": Do not load any checkpoint
870
+ - "fresh": Loads the latest checkpoint created within the running Task.
871
+ This mode helps loading checkpoints across various retry attempts of the same task.
872
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
873
+ created within the task will be loaded when the task retries execution on failure.
874
+
875
+ temp_dir_root : str, default: None
876
+ The root directory under which `current.checkpoint.directory` will be created.
877
+ """
878
+ ...
879
+
880
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
951
881
  """
952
- Specifies that this step is used to deploy an instance of the app.
953
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
882
+ Specifies that this step should execute on Kubernetes.
954
883
 
955
884
 
956
885
  Parameters
957
886
  ----------
958
- app_port : int
959
- Number of GPUs to use.
960
- app_name : str
961
- Name of the app to deploy.
887
+ cpu : int, default 1
888
+ Number of CPUs required for this step. If `@resources` is
889
+ also present, the maximum value from all decorators is used.
890
+ memory : int, default 4096
891
+ Memory size (in MB) required for this step. If
892
+ `@resources` is also present, the maximum value from all decorators is
893
+ used.
894
+ disk : int, default 10240
895
+ Disk size (in MB) required for this step. If
896
+ `@resources` is also present, the maximum value from all decorators is
897
+ used.
898
+ image : str, optional, default None
899
+ Docker image to use when launching on Kubernetes. If not specified, and
900
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
901
+ not, a default Docker image mapping to the current version of Python is used.
902
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
903
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
904
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
905
+ Kubernetes service account to use when launching pod in Kubernetes.
906
+ secrets : List[str], optional, default None
907
+ Kubernetes secrets to use when launching pod in Kubernetes. These
908
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
909
+ in Metaflow configuration.
910
+ node_selector: Union[Dict[str,str], str], optional, default None
911
+ Kubernetes node selector(s) to apply to the pod running the task.
912
+ Can be passed in as a comma separated string of values e.g.
913
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
914
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
915
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
916
+ Kubernetes namespace to use when launching pod in Kubernetes.
917
+ gpu : int, optional, default None
918
+ Number of GPUs required for this step. A value of zero implies that
919
+ the scheduled node should not have GPUs.
920
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
921
+ The vendor of the GPUs to be used for this step.
922
+ tolerations : List[str], default []
923
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
924
+ Kubernetes tolerations to use when launching pod in Kubernetes.
925
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
926
+ Kubernetes labels to use when launching pod in Kubernetes.
927
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
928
+ Kubernetes annotations to use when launching pod in Kubernetes.
929
+ use_tmpfs : bool, default False
930
+ This enables an explicit tmpfs mount for this step.
931
+ tmpfs_tempdir : bool, default True
932
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
933
+ tmpfs_size : int, optional, default: None
934
+ The value for the size (in MiB) of the tmpfs mount for this step.
935
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
936
+ memory allocated for this step.
937
+ tmpfs_path : str, optional, default /metaflow_temp
938
+ Path to tmpfs mount for this step.
939
+ persistent_volume_claims : Dict[str, str], optional, default None
940
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
941
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
942
+ shared_memory: int, optional
943
+ Shared memory size (in MiB) required for this step
944
+ port: int, optional
945
+ Port number to specify in the Kubernetes job object
946
+ compute_pool : str, optional, default None
947
+ Compute pool to be used for this step.
948
+ If not specified, any accessible compute pool within the perimeter is used.
949
+ hostname_resolution_timeout: int, default 10 * 60
950
+ Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
951
+ Only applicable when @parallel is used.
952
+ qos: str, default: Burstable
953
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
954
+
955
+ security_context: Dict[str, Any], optional, default None
956
+ Container security context. Applies to the task container. Allows the following keys:
957
+ - privileged: bool, optional, default None
958
+ - allow_privilege_escalation: bool, optional, default None
959
+ - run_as_user: int, optional, default None
960
+ - run_as_group: int, optional, default None
961
+ - run_as_non_root: bool, optional, default None
962
962
  """
963
963
  ...
964
964
 
@@ -1055,54 +1055,160 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1055
1055
  """
1056
1056
  ...
1057
1057
 
1058
- @typing.overload
1059
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1058
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1060
1059
  """
1061
- Specifies the Conda environment for all steps of the flow.
1060
+ Allows setting external datastores to save data for the
1061
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1062
1062
 
1063
- Use `@conda_base` to set common libraries required by all
1064
- steps and use `@conda` to specify step-specific additions.
1063
+ This decorator is useful when users wish to save data to a different datastore
1064
+ than what is configured in Metaflow. This can be for a variety of reasons:
1065
1065
 
1066
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1067
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1068
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1069
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1070
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1066
1071
 
1067
- Parameters
1072
+ Usage:
1068
1073
  ----------
1069
- packages : Dict[str, str], default {}
1070
- Packages to use for this flow. The key is the name of the package
1071
- and the value is the version to use.
1072
- libraries : Dict[str, str], default {}
1073
- Supported for backward compatibility. When used with packages, packages will take precedence.
1074
- python : str, optional, default None
1075
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1076
- that the version used will correspond to the version of the Python interpreter used to start the run.
1077
- disabled : bool, default False
1078
- If set to True, disables Conda.
1074
+
1075
+ - Using a custom IAM role to access the datastore.
1076
+
1077
+ ```python
1078
+ @with_artifact_store(
1079
+ type="s3",
1080
+ config=lambda: {
1081
+ "root": "s3://my-bucket-foo/path/to/root",
1082
+ "role_arn": ROLE,
1083
+ },
1084
+ )
1085
+ class MyFlow(FlowSpec):
1086
+
1087
+ @checkpoint
1088
+ @step
1089
+ def start(self):
1090
+ with open("my_file.txt", "w") as f:
1091
+ f.write("Hello, World!")
1092
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1093
+ self.next(self.end)
1094
+
1095
+ ```
1096
+
1097
+ - Using credentials to access the s3-compatible datastore.
1098
+
1099
+ ```python
1100
+ @with_artifact_store(
1101
+ type="s3",
1102
+ config=lambda: {
1103
+ "root": "s3://my-bucket-foo/path/to/root",
1104
+ "client_params": {
1105
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1106
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1107
+ },
1108
+ },
1109
+ )
1110
+ class MyFlow(FlowSpec):
1111
+
1112
+ @checkpoint
1113
+ @step
1114
+ def start(self):
1115
+ with open("my_file.txt", "w") as f:
1116
+ f.write("Hello, World!")
1117
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1118
+ self.next(self.end)
1119
+
1120
+ ```
1121
+
1122
+ - Accessing objects stored in external datastores after task execution.
1123
+
1124
+ ```python
1125
+ run = Run("CheckpointsTestsFlow/8992")
1126
+ with artifact_store_from(run=run, config={
1127
+ "client_params": {
1128
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1129
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1130
+ },
1131
+ }):
1132
+ with Checkpoint() as cp:
1133
+ latest = cp.list(
1134
+ task=run["start"].task
1135
+ )[0]
1136
+ print(latest)
1137
+ cp.load(
1138
+ latest,
1139
+ "test-checkpoints"
1140
+ )
1141
+
1142
+ task = Task("TorchTuneFlow/8484/train/53673")
1143
+ with artifact_store_from(run=run, config={
1144
+ "client_params": {
1145
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1146
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1147
+ },
1148
+ }):
1149
+ load_model(
1150
+ task.data.model_ref,
1151
+ "test-models"
1152
+ )
1153
+ ```
1154
+ Parameters:
1155
+ ----------
1156
+
1157
+ type: str
1158
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1159
+
1160
+ config: dict or Callable
1161
+ Dictionary of configuration options for the datastore. The following keys are required:
1162
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1163
+ - example: 's3://bucket-name/path/to/root'
1164
+ - example: 'gs://bucket-name/path/to/root'
1165
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1166
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1167
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1168
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1079
1169
  """
1080
1170
  ...
1081
1171
 
1082
- @typing.overload
1083
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1084
- ...
1085
-
1086
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1172
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1087
1173
  """
1088
- Specifies the Conda environment for all steps of the flow.
1089
-
1090
- Use `@conda_base` to set common libraries required by all
1091
- steps and use `@conda` to specify step-specific additions.
1174
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1175
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1092
1176
 
1093
1177
 
1094
1178
  Parameters
1095
1179
  ----------
1096
- packages : Dict[str, str], default {}
1097
- Packages to use for this flow. The key is the name of the package
1098
- and the value is the version to use.
1099
- libraries : Dict[str, str], default {}
1100
- Supported for backward compatibility. When used with packages, packages will take precedence.
1101
- python : str, optional, default None
1102
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1103
- that the version used will correspond to the version of the Python interpreter used to start the run.
1104
- disabled : bool, default False
1105
- If set to True, disables Conda.
1180
+ timeout : int
1181
+ Time, in seconds before the task times out and fails. (Default: 3600)
1182
+ poke_interval : int
1183
+ Time in seconds that the job should wait in between each try. (Default: 60)
1184
+ mode : str
1185
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1186
+ exponential_backoff : bool
1187
+ allow progressively longer waits between pokes by using the exponential backoff algorithm. (Default: True)
1188
+ pool : str
1189
+ the slot pool this task should run in,
1190
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1191
+ soft_fail : bool
1192
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1193
+ name : str
1194
+ Name of the sensor on Airflow
1195
+ description : str
1196
+ Description of sensor in the Airflow UI
1197
+ external_dag_id : str
1198
+ The dag_id that contains the task you want to wait for.
1199
+ external_task_ids : List[str]
1200
+ The list of task_ids that you want to wait for.
1201
+ If None (default value) the sensor waits for the DAG. (Default: None)
1202
+ allowed_states : List[str]
1203
+ Iterable of allowed states, (Default: ['success'])
1204
+ failed_states : List[str]
1205
+ Iterable of failed or dis-allowed states. (Default: None)
1206
+ execution_delta : datetime.timedelta
1207
+ time difference with the previous execution to look at,
1208
+ the default is the same logical date as the current task or DAG. (Default: None)
1209
+ check_existence: bool
1210
+ Set to True to check if the external task exists or check if
1211
+ the DAG to wait for exists. (Default: True)
1106
1212
  """
1107
1213
  ...
1108
1214
 
@@ -1147,41 +1253,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1147
1253
  """
1148
1254
  ...
1149
1255
 
1150
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1151
- """
1152
- Specifies what flows belong to the same project.
1153
-
1154
- A project-specific namespace is created for all flows that
1155
- use the same `@project(name)`.
1156
-
1157
-
1158
- Parameters
1159
- ----------
1160
- name : str
1161
- Project name. Make sure that the name is unique amongst all
1162
- projects that use the same production scheduler. The name may
1163
- contain only lowercase alphanumeric characters and underscores.
1164
-
1165
- branch : Optional[str], default None
1166
- The branch to use. If not specified, the branch is set to
1167
- `user.<username>` unless `production` is set to `True`. This can
1168
- also be set on the command line using `--branch` as a top-level option.
1169
- It is an error to specify `branch` in the decorator and on the command line.
1170
-
1171
- production : bool, default False
1172
- Whether or not the branch is the production branch. This can also be set on the
1173
- command line using `--production` as a top-level option. It is an error to specify
1174
- `production` in the decorator and on the command line.
1175
- The project branch name will be:
1176
- - if `branch` is specified:
1177
- - if `production` is True: `prod.<branch>`
1178
- - if `production` is False: `test.<branch>`
1179
- - if `branch` is not specified:
1180
- - if `production` is True: `prod`
1181
- - if `production` is False: `user.<username>`
1182
- """
1183
- ...
1184
-
1185
1256
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1186
1257
  """
1187
1258
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1225,49 +1296,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1225
1296
  """
1226
1297
  ...
1227
1298
 
1228
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1229
- """
1230
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1231
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1232
-
1233
-
1234
- Parameters
1235
- ----------
1236
- timeout : int
1237
- Time, in seconds before the task times out and fails. (Default: 3600)
1238
- poke_interval : int
1239
- Time in seconds that the job should wait in between each try. (Default: 60)
1240
- mode : str
1241
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1242
- exponential_backoff : bool
1243
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1244
- pool : str
1245
- the slot pool this task should run in,
1246
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1247
- soft_fail : bool
1248
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1249
- name : str
1250
- Name of the sensor on Airflow
1251
- description : str
1252
- Description of sensor in the Airflow UI
1253
- external_dag_id : str
1254
- The dag_id that contains the task you want to wait for.
1255
- external_task_ids : List[str]
1256
- The list of task_ids that you want to wait for.
1257
- If None (default value) the sensor waits for the DAG. (Default: None)
1258
- allowed_states : List[str]
1259
- Iterable of allowed states, (Default: ['success'])
1260
- failed_states : List[str]
1261
- Iterable of failed or dis-allowed states. (Default: None)
1262
- execution_delta : datetime.timedelta
1263
- time difference with the previous execution to look at,
1264
- the default is the same logical date as the current task or DAG. (Default: None)
1265
- check_existence: bool
1266
- Set to True to check if the external task exists or check if
1267
- the DAG to wait for exists. (Default: True)
1268
- """
1269
- ...
1270
-
1271
1299
  @typing.overload
1272
1300
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1273
1301
  """
@@ -1420,117 +1448,89 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1420
1448
  """
1421
1449
  ...
1422
1450
 
1423
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1451
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1424
1452
  """
1425
- Allows setting external datastores to save data for the
1426
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1453
+ Specifies what flows belong to the same project.
1427
1454
 
1428
- This decorator is useful when users wish to save data to a different datastore
1429
- than what is configured in Metaflow. This can be for variety of reasons:
1455
+ A project-specific namespace is created for all flows that
1456
+ use the same `@project(name)`.
1430
1457
 
1431
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1432
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1433
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1434
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1435
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1436
1458
 
1437
- Usage:
1459
+ Parameters
1438
1460
  ----------
1461
+ name : str
1462
+ Project name. Make sure that the name is unique amongst all
1463
+ projects that use the same production scheduler. The name may
1464
+ contain only lowercase alphanumeric characters and underscores.
1439
1465
 
1440
- - Using a custom IAM role to access the datastore.
1441
-
1442
- ```python
1443
- @with_artifact_store(
1444
- type="s3",
1445
- config=lambda: {
1446
- "root": "s3://my-bucket-foo/path/to/root",
1447
- "role_arn": ROLE,
1448
- },
1449
- )
1450
- class MyFlow(FlowSpec):
1451
-
1452
- @checkpoint
1453
- @step
1454
- def start(self):
1455
- with open("my_file.txt", "w") as f:
1456
- f.write("Hello, World!")
1457
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1458
- self.next(self.end)
1459
-
1460
- ```
1466
+ branch : Optional[str], default None
1467
+ The branch to use. If not specified, the branch is set to
1468
+ `user.<username>` unless `production` is set to `True`. This can
1469
+ also be set on the command line using `--branch` as a top-level option.
1470
+ It is an error to specify `branch` in the decorator and on the command line.
1461
1471
 
1462
- - Using credentials to access the s3-compatible datastore.
1472
+ production : bool, default False
1473
+ Whether or not the branch is the production branch. This can also be set on the
1474
+ command line using `--production` as a top-level option. It is an error to specify
1475
+ `production` in the decorator and on the command line.
1476
+ The project branch name will be:
1477
+ - if `branch` is specified:
1478
+ - if `production` is True: `prod.<branch>`
1479
+ - if `production` is False: `test.<branch>`
1480
+ - if `branch` is not specified:
1481
+ - if `production` is True: `prod`
1482
+ - if `production` is False: `user.<username>`
1483
+ """
1484
+ ...
1485
+
1486
+ @typing.overload
1487
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1488
+ """
1489
+ Specifies the Conda environment for all steps of the flow.
1463
1490
 
1464
- ```python
1465
- @with_artifact_store(
1466
- type="s3",
1467
- config=lambda: {
1468
- "root": "s3://my-bucket-foo/path/to/root",
1469
- "client_params": {
1470
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1471
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1472
- },
1473
- },
1474
- )
1475
- class MyFlow(FlowSpec):
1491
+ Use `@conda_base` to set common libraries required by all
1492
+ steps and use `@conda` to specify step-specific additions.
1476
1493
 
1477
- @checkpoint
1478
- @step
1479
- def start(self):
1480
- with open("my_file.txt", "w") as f:
1481
- f.write("Hello, World!")
1482
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1483
- self.next(self.end)
1484
1494
 
1485
- ```
1495
+ Parameters
1496
+ ----------
1497
+ packages : Dict[str, str], default {}
1498
+ Packages to use for this flow. The key is the name of the package
1499
+ and the value is the version to use.
1500
+ libraries : Dict[str, str], default {}
1501
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1502
+ python : str, optional, default None
1503
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1504
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1505
+ disabled : bool, default False
1506
+ If set to True, disables Conda.
1507
+ """
1508
+ ...
1509
+
1510
+ @typing.overload
1511
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1512
+ ...
1513
+
1514
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1515
+ """
1516
+ Specifies the Conda environment for all steps of the flow.
1486
1517
 
1487
- - Accessing objects stored in external datastores after task execution.
1518
+ Use `@conda_base` to set common libraries required by all
1519
+ steps and use `@conda` to specify step-specific additions.
1488
1520
 
1489
- ```python
1490
- run = Run("CheckpointsTestsFlow/8992")
1491
- with artifact_store_from(run=run, config={
1492
- "client_params": {
1493
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1494
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1495
- },
1496
- }):
1497
- with Checkpoint() as cp:
1498
- latest = cp.list(
1499
- task=run["start"].task
1500
- )[0]
1501
- print(latest)
1502
- cp.load(
1503
- latest,
1504
- "test-checkpoints"
1505
- )
1506
1521
 
1507
- task = Task("TorchTuneFlow/8484/train/53673")
1508
- with artifact_store_from(run=run, config={
1509
- "client_params": {
1510
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1511
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1512
- },
1513
- }):
1514
- load_model(
1515
- task.data.model_ref,
1516
- "test-models"
1517
- )
1518
- ```
1519
- Parameters:
1522
+ Parameters
1520
1523
  ----------
1521
-
1522
- type: str
1523
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1524
-
1525
- config: dict or Callable
1526
- Dictionary of configuration options for the datastore. The following keys are required:
1527
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1528
- - example: 's3://bucket-name/path/to/root'
1529
- - example: 'gs://bucket-name/path/to/root'
1530
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1531
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1532
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1533
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1524
+ packages : Dict[str, str], default {}
1525
+ Packages to use for this flow. The key is the name of the package
1526
+ and the value is the version to use.
1527
+ libraries : Dict[str, str], default {}
1528
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1529
+ python : str, optional, default None
1530
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1531
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1532
+ disabled : bool, default False
1533
+ If set to True, disables Conda.
1534
1534
  """
1535
1535
  ...
1536
1536