ob-metaflow-stubs 6.0.3.176rc3__py2.py3-none-any.whl → 6.0.3.176rc4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (218)
  1. metaflow-stubs/__init__.pyi +703 -703
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +134 -134
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  90. metaflow-stubs/multicore_utils.pyi +1 -1
  91. metaflow-stubs/ob_internal.pyi +1 -1
  92. metaflow-stubs/parameters.pyi +3 -3
  93. metaflow-stubs/plugins/__init__.pyi +10 -10
  94. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  96. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  102. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  103. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  105. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  107. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  108. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  109. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  110. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  111. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  112. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  115. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  116. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  117. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  119. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  124. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  125. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  126. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  128. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  129. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  130. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  131. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  133. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  138. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  140. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  142. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  144. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  145. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  146. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  147. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  148. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  151. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  153. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  154. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  155. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  156. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  157. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  159. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  165. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  166. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  167. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  169. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  172. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  173. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/perimeters.pyi +1 -1
  175. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  177. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  181. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  183. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  184. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  187. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  188. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  189. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  190. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  191. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  195. metaflow-stubs/profilers/__init__.pyi +1 -1
  196. metaflow-stubs/pylint_wrapper.pyi +1 -1
  197. metaflow-stubs/runner/__init__.pyi +1 -1
  198. metaflow-stubs/runner/deployer.pyi +6 -6
  199. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  200. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  201. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  202. metaflow-stubs/runner/nbrun.pyi +1 -1
  203. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  204. metaflow-stubs/runner/utils.pyi +1 -1
  205. metaflow-stubs/system/__init__.pyi +1 -1
  206. metaflow-stubs/system/system_logger.pyi +1 -1
  207. metaflow-stubs/system/system_monitor.pyi +1 -1
  208. metaflow-stubs/tagging_util.pyi +1 -1
  209. metaflow-stubs/tuple_util.pyi +1 -1
  210. metaflow-stubs/user_configs/__init__.pyi +1 -1
  211. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  212. metaflow-stubs/user_configs/config_options.pyi +3 -3
  213. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  214. {ob_metaflow_stubs-6.0.3.176rc3.dist-info → ob_metaflow_stubs-6.0.3.176rc4.dist-info}/METADATA +1 -1
  215. ob_metaflow_stubs-6.0.3.176rc4.dist-info/RECORD +218 -0
  216. ob_metaflow_stubs-6.0.3.176rc3.dist-info/RECORD +0 -218
  217. {ob_metaflow_stubs-6.0.3.176rc3.dist-info → ob_metaflow_stubs-6.0.3.176rc4.dist-info}/WHEEL +0 -0
  218. {ob_metaflow_stubs-6.0.3.176rc3.dist-info → ob_metaflow_stubs-6.0.3.176rc4.dist-info}/top_level.txt +0 -0
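Most of the change below is a re-generation of `metaflow-stubs/__init__.pyi`: the top-level imports and step-decorator stubs (`@retry`, `@resources`, `@pypi`, `@timeout`, `@catch`, `@card`, and others) keep their signatures and docstrings but move to different positions in the file. For orientation, here is a minimal sketch of a flow exercising a few of the decorators whose stubs are reshuffled below; it assumes the standard Metaflow step API that these stubs annotate, and the flow name, the pinned `requests` version, and the step bodies are illustrative only, not part of the package.

```python
# A minimal sketch, assuming the standard Metaflow step API that these stubs
# annotate. Flow name, the pinned "requests" version, and step bodies are
# illustrative only and are not part of the package being diffed.
from metaflow import FlowSpec, catch, pypi, retry, step, timeout


class StubSmokeTestFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)  # re-run on transient errors
    @pypi(packages={"requests": "2.31.0"})      # step-specific PyPI packages
    @step
    def start(self):
        import requests  # provided by @pypi in this step's environment
        self.status = requests.get("https://api.github.com").status_code
        self.next(self.end)

    @catch(var="failure")  # stores any raised exception in self.failure
    @timeout(minutes=10)   # seconds/minutes/hours are added together
    @step
    def end(self):
        print("status:", self.status)


if __name__ == "__main__":
    StubSmokeTestFlow()
```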
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-04T06:35:02.879434 #
+ # Generated on 2025-06-04T19:55:23.864727 #
  ######################################################################################################

  from __future__ import annotations
@@ -36,17 +36,17 @@ from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import cards as cards
- from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -155,180 +155,129 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ """
+ ...
+
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+ Parameters
+ ----------
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.


  Parameters
  ----------
- gpu : int
+ app_port : int
  Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ app_name : str
+ Name of the app to deploy.
  """
  ...

@@ -368,365 +317,485 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
368
317
  ...
369
318
 
370
319
  @typing.overload
371
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
320
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
372
321
  """
373
- Specifies environment variables to be set prior to the execution of a step.
322
+ Specifies the PyPI packages for the step.
323
+
324
+ Information in this decorator will augment any
325
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
326
+ you can use `@pypi_base` to set packages required by all
327
+ steps and use `@pypi` to specify step-specific overrides.
374
328
 
375
329
 
376
330
  Parameters
377
331
  ----------
378
- vars : Dict[str, str], default {}
379
- Dictionary of environment variables to set.
332
+ packages : Dict[str, str], default: {}
333
+ Packages to use for this step. The key is the name of the package
334
+ and the value is the version to use.
335
+ python : str, optional, default: None
336
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
337
+ that the version used will correspond to the version of the Python interpreter used to start the run.
380
338
  """
381
339
  ...
382
340
 
383
341
  @typing.overload
384
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
342
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
385
343
  ...
386
344
 
387
345
  @typing.overload
388
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
346
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
389
347
  ...
390
348
 
391
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
349
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
392
350
  """
393
- Specifies environment variables to be set prior to the execution of a step.
351
+ Specifies the PyPI packages for the step.
352
+
353
+ Information in this decorator will augment any
354
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
355
+ you can use `@pypi_base` to set packages required by all
356
+ steps and use `@pypi` to specify step-specific overrides.
394
357
 
395
358
 
396
359
  Parameters
397
360
  ----------
398
- vars : Dict[str, str], default {}
399
- Dictionary of environment variables to set.
400
- """
401
- ...
402
-
361
+ packages : Dict[str, str], default: {}
362
+ Packages to use for this step. The key is the name of the package
363
+ and the value is the version to use.
364
+ python : str, optional, default: None
365
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
366
+ that the version used will correspond to the version of the Python interpreter used to start the run.
367
+ """
368
+ ...
369
+
403
370
  @typing.overload
404
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
371
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
405
372
  """
406
- Specifies the number of times the task corresponding
407
- to a step needs to be retried.
373
+ Specifies the resources needed when executing this step.
408
374
 
409
- This decorator is useful for handling transient errors, such as networking issues.
410
- If your task contains operations that can't be retried safely, e.g. database updates,
411
- it is advisable to annotate it with `@retry(times=0)`.
375
+ Use `@resources` to specify the resource requirements
376
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
412
377
 
413
- This can be used in conjunction with the `@catch` decorator. The `@catch`
414
- decorator will execute a no-op task after all retries have been exhausted,
415
- ensuring that the flow execution can continue.
378
+ You can choose the compute layer on the command line by executing e.g.
379
+ ```
380
+ python myflow.py run --with batch
381
+ ```
382
+ or
383
+ ```
384
+ python myflow.py run --with kubernetes
385
+ ```
386
+ which executes the flow on the desired system using the
387
+ requirements specified in `@resources`.
416
388
 
417
389
 
418
390
  Parameters
419
391
  ----------
420
- times : int, default 3
421
- Number of times to retry this task.
422
- minutes_between_retries : int, default 2
423
- Number of minutes between retries.
392
+ cpu : int, default 1
393
+ Number of CPUs required for this step.
394
+ gpu : int, optional, default None
395
+ Number of GPUs required for this step.
396
+ disk : int, optional, default None
397
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
398
+ memory : int, default 4096
399
+ Memory size (in MB) required for this step.
400
+ shared_memory : int, optional, default None
401
+ The value for the size (in MiB) of the /dev/shm volume for this step.
402
+ This parameter maps to the `--shm-size` option in Docker.
424
403
  """
425
404
  ...
426
405
 
427
406
  @typing.overload
428
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
407
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
429
408
  ...
430
409
 
431
410
  @typing.overload
432
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
411
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
433
412
  ...
434
413
 
435
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
414
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
436
415
  """
437
- Specifies the number of times the task corresponding
438
- to a step needs to be retried.
416
+ Specifies the resources needed when executing this step.
439
417
 
440
- This decorator is useful for handling transient errors, such as networking issues.
441
- If your task contains operations that can't be retried safely, e.g. database updates,
442
- it is advisable to annotate it with `@retry(times=0)`.
418
+ Use `@resources` to specify the resource requirements
419
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
443
420
 
444
- This can be used in conjunction with the `@catch` decorator. The `@catch`
445
- decorator will execute a no-op task after all retries have been exhausted,
446
- ensuring that the flow execution can continue.
421
+ You can choose the compute layer on the command line by executing e.g.
422
+ ```
423
+ python myflow.py run --with batch
424
+ ```
425
+ or
426
+ ```
427
+ python myflow.py run --with kubernetes
428
+ ```
429
+ which executes the flow on the desired system using the
430
+ requirements specified in `@resources`.
447
431
 
448
432
 
449
433
  Parameters
450
434
  ----------
451
- times : int, default 3
452
- Number of times to retry this task.
453
- minutes_between_retries : int, default 2
454
- Number of minutes between retries.
435
+ cpu : int, default 1
436
+ Number of CPUs required for this step.
437
+ gpu : int, optional, default None
438
+ Number of GPUs required for this step.
439
+ disk : int, optional, default None
440
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
441
+ memory : int, default 4096
442
+ Memory size (in MB) required for this step.
443
+ shared_memory : int, optional, default None
444
+ The value for the size (in MiB) of the /dev/shm volume for this step.
445
+ This parameter maps to the `--shm-size` option in Docker.
455
446
  """
456
447
  ...
457
448
 
458
449
  @typing.overload
459
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
450
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
460
451
  """
461
- Enables loading / saving of models within a step.
452
+ Specifies a timeout for your step.
453
+
454
+ This decorator is useful if this step may hang indefinitely.
462
455
 
456
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
457
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
458
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
459
+
460
+ Note that all the values specified in parameters are added together so if you specify
461
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
463
462
 
464
463
 
465
464
  Parameters
466
465
  ----------
467
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
468
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
469
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
470
- - `current.checkpoint`
471
- - `current.model`
472
- - `current.huggingface_hub`
473
-
474
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
475
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
476
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
477
-
478
- temp_dir_root : str, default: None
479
- The root directory under which `current.model.loaded` will store loaded models
466
+ seconds : int, default 0
467
+ Number of seconds to wait prior to timing out.
468
+ minutes : int, default 0
469
+ Number of minutes to wait prior to timing out.
470
+ hours : int, default 0
471
+ Number of hours to wait prior to timing out.
480
472
  """
481
473
  ...
482
474
 
483
475
  @typing.overload
484
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
476
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
485
477
  ...
486
478
 
487
479
  @typing.overload
488
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
480
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
489
481
  ...
490
482
 
491
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
483
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
492
484
  """
493
- Enables loading / saving of models within a step.
485
+ Specifies a timeout for your step.
494
486
 
487
+ This decorator is useful if this step may hang indefinitely.
495
488
 
489
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
490
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
491
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
496
492
 
497
- Parameters
498
- ----------
499
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
500
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
501
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
502
- - `current.checkpoint`
503
- - `current.model`
504
- - `current.huggingface_hub`
493
+ Note that all the values specified in parameters are added together so if you specify
494
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
505
495
 
506
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
507
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
508
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
509
496
 
510
- temp_dir_root : str, default: None
511
- The root directory under which `current.model.loaded` will store loaded models
497
+ Parameters
498
+ ----------
499
+ seconds : int, default 0
500
+ Number of seconds to wait prior to timing out.
501
+ minutes : int, default 0
502
+ Number of minutes to wait prior to timing out.
503
+ hours : int, default 0
504
+ Number of hours to wait prior to timing out.
512
505
  """
513
506
  ...
514
507
 
515
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
508
+ @typing.overload
509
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
516
510
  """
517
- Specifies that this step is used to deploy an instance of the app.
518
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
511
+ Specifies that the step will success under all circumstances.
512
+
513
+ The decorator will create an optional artifact, specified by `var`, which
514
+ contains the exception raised. You can use it to detect the presence
515
+ of errors, indicating that all happy-path artifacts produced by the step
516
+ are missing.
519
517
 
520
518
 
521
519
  Parameters
522
520
  ----------
523
- app_port : int
524
- Number of GPUs to use.
525
- app_name : str
526
- Name of the app to deploy.
521
+ var : str, optional, default None
522
+ Name of the artifact in which to store the caught exception.
523
+ If not specified, the exception is not stored.
524
+ print_exception : bool, default True
525
+ Determines whether or not the exception is printed to
526
+ stdout when caught.
527
527
  """
528
528
  ...
529
529
 
530
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
530
+ @typing.overload
531
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
532
+ ...
533
+
534
+ @typing.overload
535
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
536
+ ...
537
+
538
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
531
539
  """
532
- Specifies that this step should execute on DGX cloud.
540
+ Specifies that the step will success under all circumstances.
541
+
542
+ The decorator will create an optional artifact, specified by `var`, which
543
+ contains the exception raised. You can use it to detect the presence
544
+ of errors, indicating that all happy-path artifacts produced by the step
545
+ are missing.
533
546
 
534
547
 
535
548
  Parameters
536
549
  ----------
537
- gpu : int
538
- Number of GPUs to use.
539
- gpu_type : str
540
- Type of Nvidia GPU to use.
541
- queue_timeout : int
542
- Time to keep the job in NVCF's queue.
550
+ var : str, optional, default None
551
+ Name of the artifact in which to store the caught exception.
552
+ If not specified, the exception is not stored.
553
+ print_exception : bool, default True
554
+ Determines whether or not the exception is printed to
555
+ stdout when caught.
543
556
  """
544
557
  ...
545
558
 
546
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
559
+ @typing.overload
560
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
547
561
  """
548
- Decorator that helps cache, version and store models/datasets from huggingface hub.
562
+ Creates a human-readable report, a Metaflow Card, after this step completes.
563
+
564
+ Note that you may add multiple `@card` decorators in a step with different parameters.
549
565
 
550
566
 
551
567
  Parameters
552
568
  ----------
553
- temp_dir_root : str, optional
554
- The root directory that will hold the temporary directory where objects will be downloaded.
555
-
556
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
557
- The list of repos (models/datasets) to load.
558
-
559
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
560
-
561
- - If repo (model/dataset) is not found in the datastore:
562
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
563
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
564
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
565
-
566
- - If repo is found in the datastore:
567
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
569
+ type : str, default 'default'
570
+ Card type.
571
+ id : str, optional, default None
572
+ If multiple cards are present, use this id to identify this card.
573
+ options : Dict[str, Any], default {}
574
+ Options passed to the card. The contents depend on the card type.
575
+ timeout : int, default 45
576
+ Interrupt reporting if it takes more than this many seconds.
568
577
  """
569
578
  ...
570
579
 
571
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
580
+ @typing.overload
581
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
582
+ ...
583
+
584
+ @typing.overload
585
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
586
+ ...
587
+
588
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
572
589
  """
573
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
574
-
575
- User code call
576
- --------------
577
- @ollama(
578
- models=[...],
579
- ...
580
- )
581
-
582
- Valid backend options
583
- ---------------------
584
- - 'local': Run as a separate process on the local task machine.
585
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
586
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
590
+ Creates a human-readable report, a Metaflow Card, after this step completes.
587
591
 
588
- Valid model options
589
- -------------------
590
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
592
+ Note that you may add multiple `@card` decorators in a step with different parameters.
591
593
 
592
594
 
593
595
  Parameters
594
596
  ----------
595
- models: list[str]
596
- List of Ollama containers running models in sidecars.
597
- backend: str
598
- Determines where and how to run the Ollama process.
599
- force_pull: bool
600
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
601
- cache_update_policy: str
602
- Cache update policy: "auto", "force", or "never".
603
- force_cache_update: bool
604
- Simple override for "force" cache update policy.
605
- debug: bool
606
- Whether to turn on verbose debugging logs.
607
- circuit_breaker_config: dict
608
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
609
- timeout_config: dict
610
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
597
+ type : str, default 'default'
598
+ Card type.
599
+ id : str, optional, default None
600
+ If multiple cards are present, use this id to identify this card.
601
+ options : Dict[str, Any], default {}
602
+ Options passed to the card. The contents depend on the card type.
603
+ timeout : int, default 45
604
+ Interrupt reporting if it takes more than this many seconds.
611
605
  """
612
606
  ...
613
607
 
614
608
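A minimal sketch of the `@card` decorator documented above, assuming the decorator, `current.card`, and `metaflow.cards.Markdown` are importable as the stubs suggest; flow, step, and card `id` names are illustrative.

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):

    # A step may carry several @card decorators; `id` disambiguates them.
    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        self.metric = 0.93
        # Append content to the card identified by `id` (assumed accessor).
        current.card["summary"].append(Markdown(f"accuracy: {self.metric}"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```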
  @typing.overload
615
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
609
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
616
610
  """
617
- Internal decorator to support Fast bakery
611
+ Decorator prototype for all step decorators. This function gets specialized
612
+ and imported for all decorator types by _import_plugin_decorators().
618
613
  """
619
614
  ...
620
615
 
621
616
  @typing.overload
622
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
617
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
623
618
  ...
624
619
 
625
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
620
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
626
621
  """
627
- Internal decorator to support Fast bakery
622
+ Decorator prototype for all step decorators. This function gets specialized
623
+ and imported for all decorator types by _import_plugin_decorators().
624
+ """
625
+ ...
626
+
627
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
628
+ """
629
+ Specifies that this step should execute on Kubernetes.
630
+
631
+
632
+ Parameters
633
+ ----------
634
+ cpu : int, default 1
635
+ Number of CPUs required for this step. If `@resources` is
636
+ also present, the maximum value from all decorators is used.
637
+ memory : int, default 4096
638
+ Memory size (in MB) required for this step. If
639
+ `@resources` is also present, the maximum value from all decorators is
640
+ used.
641
+ disk : int, default 10240
642
+ Disk size (in MB) required for this step. If
643
+ `@resources` is also present, the maximum value from all decorators is
644
+ used.
645
+ image : str, optional, default None
646
+ Docker image to use when launching on Kubernetes. If not specified, and
647
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
648
+ not, a default Docker image mapping to the current version of Python is used.
649
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
650
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
651
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
652
+ Kubernetes service account to use when launching pod in Kubernetes.
653
+ secrets : List[str], optional, default None
654
+ Kubernetes secrets to use when launching pod in Kubernetes. These
655
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
656
+ in Metaflow configuration.
657
+ node_selector: Union[Dict[str,str], str], optional, default None
658
+ Kubernetes node selector(s) to apply to the pod running the task.
659
+ Can be passed in as a comma separated string of values e.g.
660
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
661
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
662
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
663
+ Kubernetes namespace to use when launching pod in Kubernetes.
664
+ gpu : int, optional, default None
665
+ Number of GPUs required for this step. A value of zero implies that
666
+ the scheduled node should not have GPUs.
667
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
668
+ The vendor of the GPUs to be used for this step.
669
+ tolerations : List[str], default []
670
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
671
+ Kubernetes tolerations to use when launching pod in Kubernetes.
672
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
673
+ Kubernetes labels to use when launching pod in Kubernetes.
674
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
675
+ Kubernetes annotations to use when launching pod in Kubernetes.
676
+ use_tmpfs : bool, default False
677
+ This enables an explicit tmpfs mount for this step.
678
+ tmpfs_tempdir : bool, default True
679
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
680
+ tmpfs_size : int, optional, default: None
681
+ The value for the size (in MiB) of the tmpfs mount for this step.
682
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
683
+ memory allocated for this step.
684
+ tmpfs_path : str, optional, default /metaflow_temp
685
+ Path to tmpfs mount for this step.
686
+ persistent_volume_claims : Dict[str, str], optional, default None
687
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
688
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
689
+ shared_memory: int, optional
690
+ Shared memory size (in MiB) required for this step
691
+ port: int, optional
692
+ Port number to specify in the Kubernetes job object
693
+ compute_pool : str, optional, default None
694
+ Compute pool to be used for this step.
695
+ If not specified, any accessible compute pool within the perimeter is used.
696
+ hostname_resolution_timeout: int, default 10 * 60
697
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
698
+ Only applicable when @parallel is used.
699
+ qos: str, default: Burstable
700
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
701
+
702
+ security_context: Dict[str, Any], optional, default None
703
+ Container security context. Applies to the task container. Allows the following keys:
704
+ - privileged: bool, optional, default None
705
+ - allow_privilege_escalation: bool, optional, default None
706
+ - run_as_user: int, optional, default None
707
+ - run_as_group: int, optional, default None
708
+ - run_as_non_root: bool, optional, default None
628
709
  """
629
710
  ...
630
711
 
631
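A sketch of a step pinned to Kubernetes with the resource parameters described above; the image name and resource sizes are placeholders and would normally come from Metaflow configuration.

```python
from metaflow import FlowSpec, kubernetes, step


class K8sDemoFlow(FlowSpec):

    # cpu/memory/disk follow the units documented above (cores, MB, MB);
    # the image is a placeholder.
    @kubernetes(cpu=2, memory=8192, disk=20480, image="python:3.11")
    @step
    def start(self):
        self.processed = True
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sDemoFlow()
```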
- @typing.overload
632
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
712
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
633
713
  """
634
- Specifies that the step will success under all circumstances.
635
-
636
- The decorator will create an optional artifact, specified by `var`, which
637
- contains the exception raised. You can use it to detect the presence
638
- of errors, indicating that all happy-path artifacts produced by the step
639
- are missing.
714
+ Specifies that this step should execute on DGX Cloud.
640
715
 
641
716
 
642
717
  Parameters
643
718
  ----------
644
- var : str, optional, default None
645
- Name of the artifact in which to store the caught exception.
646
- If not specified, the exception is not stored.
647
- print_exception : bool, default True
648
- Determines whether or not the exception is printed to
649
- stdout when caught.
719
+ gpu : int
720
+ Number of GPUs to use.
721
+ gpu_type : str
722
+ Type of Nvidia GPU to use.
723
+ queue_timeout : int
724
+ Time to keep the job in NVCF's queue.
650
725
  """
651
726
  ...
652
727
 
653
728
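A sketch of the `@nvidia` decorator with its three documented parameters; the GPU count, GPU type, and queue timeout values are examples only.

```python
from metaflow import FlowSpec, nvidia, step


class NvidiaDemoFlow(FlowSpec):

    # gpu / gpu_type / queue_timeout as documented above; values are illustrative.
    @nvidia(gpu=1, gpu_type="A100", queue_timeout=3600)
    @step
    def start(self):
        self.trained = True
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvidiaDemoFlow()
```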
  @typing.overload
654
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
729
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
730
+ """
731
+ Internal decorator to support Fast Bakery.
732
+ """
655
733
  ...
656
734
 
657
735
  @typing.overload
658
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
736
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
659
737
  ...
660
738
 
661
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
739
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
662
740
  """
663
- Specifies that the step will success under all circumstances.
664
-
665
- The decorator will create an optional artifact, specified by `var`, which
666
- contains the exception raised. You can use it to detect the presence
667
- of errors, indicating that all happy-path artifacts produced by the step
668
- are missing.
669
-
670
-
671
- Parameters
672
- ----------
673
- var : str, optional, default None
674
- Name of the artifact in which to store the caught exception.
675
- If not specified, the exception is not stored.
676
- print_exception : bool, default True
677
- Determines whether or not the exception is printed to
678
- stdout when caught.
741
+ Internal decorator to support Fast Bakery.
679
742
  """
680
743
  ...
681
744
 
682
745
  @typing.overload
683
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
746
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
684
747
  """
685
- Specifies the PyPI packages for the step.
748
+ Enables checkpointing for a step.
686
749
 
687
- Information in this decorator will augment any
688
- attributes set in the `@pyi_base` flow-level decorator. Hence,
689
- you can use `@pypi_base` to set packages required by all
690
- steps and use `@pypi` to specify step-specific overrides.
691
750
 
692
751
 
693
752
  Parameters
694
753
  ----------
695
- packages : Dict[str, str], default: {}
696
- Packages to use for this step. The key is the name of the package
697
- and the value is the version to use.
698
- python : str, optional, default: None
699
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
700
- that the version used will correspond to the version of the Python interpreter used to start the run.
754
+ load_policy : str, default: "fresh"
755
+ The policy for loading the checkpoint. The following policies are supported:
756
+ - "eager": Loads the the latest available checkpoint within the namespace.
757
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
758
+ will be loaded at the start of the task.
759
+ - "none": Do not load any checkpoint
760
+ - "fresh": Loads the lastest checkpoint created within the running Task.
761
+ This mode helps loading checkpoints across various retry attempts of the same task.
762
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
763
+ created within the task will be loaded when the task retries execution on failure.
764
+
765
+ temp_dir_root : str, default: None
766
+ The root directory under which `current.checkpoint.directory` will be created.
701
767
  """
702
768
  ...
703
769
 
704
770
  @typing.overload
705
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
771
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
706
772
  ...
707
773
 
708
774
  @typing.overload
709
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
775
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
710
776
  ...
711
777
 
712
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
778
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
713
779
  """
714
- Specifies the PyPI packages for the step.
780
+ Enables checkpointing for a step.
715
781
 
716
- Information in this decorator will augment any
717
- attributes set in the `@pyi_base` flow-level decorator. Hence,
718
- you can use `@pypi_base` to set packages required by all
719
- steps and use `@pypi` to specify step-specific overrides.
720
782
 
721
783
 
722
784
  Parameters
723
785
  ----------
724
- packages : Dict[str, str], default: {}
725
- Packages to use for this step. The key is the name of the package
726
- and the value is the version to use.
727
- python : str, optional, default: None
728
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
729
- that the version used will correspond to the version of the Python interpreter used to start the run.
786
+ load_policy : str, default: "fresh"
787
+ The policy for loading the checkpoint. The following policies are supported:
788
+ - "eager": Loads the the latest available checkpoint within the namespace.
789
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
790
+ will be loaded at the start of the task.
791
+ - "none": Do not load any checkpoint
792
+ - "fresh": Loads the lastest checkpoint created within the running Task.
793
+ This mode helps loading checkpoints across various retry attempts of the same task.
794
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
795
+ created within the task will be loaded when the task retries execution on failure.
796
+
797
+ temp_dir_root : str, default: None
798
+ The root directory under which `current.checkpoint.directory` will be created.
730
799
  """
731
800
  ...
732
801
 
@@ -790,186 +859,117 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
790
859
  ...
791
860
 
792
861
  @typing.overload
793
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
862
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
794
863
  """
795
- Enables checkpointing for a step.
796
-
864
+ Specifies environment variables to be set prior to the execution of a step.
797
865
 
798
866
 
799
867
  Parameters
800
868
  ----------
801
- load_policy : str, default: "fresh"
802
- The policy for loading the checkpoint. The following policies are supported:
803
- - "eager": Loads the the latest available checkpoint within the namespace.
804
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
805
- will be loaded at the start of the task.
806
- - "none": Do not load any checkpoint
807
- - "fresh": Loads the lastest checkpoint created within the running Task.
808
- This mode helps loading checkpoints across various retry attempts of the same task.
809
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
810
- created within the task will be loaded when the task is retries execution on failure.
811
-
812
- temp_dir_root : str, default: None
813
- The root directory under which `current.checkpoint.directory` will be created.
869
+ vars : Dict[str, str], default {}
870
+ Dictionary of environment variables to set.
814
871
  """
815
872
  ...
816
873
 
817
874
  @typing.overload
818
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
875
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
819
876
  ...
820
877
 
821
878
  @typing.overload
822
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
879
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
823
880
  ...
824
881
 
825
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
882
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
826
883
  """
827
- Enables checkpointing for a step.
828
-
884
+ Specifies environment variables to be set prior to the execution of a step.
829
885
 
830
886
 
831
887
  Parameters
832
888
  ----------
833
- load_policy : str, default: "fresh"
834
- The policy for loading the checkpoint. The following policies are supported:
835
- - "eager": Loads the the latest available checkpoint within the namespace.
836
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
837
- will be loaded at the start of the task.
838
- - "none": Do not load any checkpoint
839
- - "fresh": Loads the lastest checkpoint created within the running Task.
840
- This mode helps loading checkpoints across various retry attempts of the same task.
841
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
842
- created within the task will be loaded when the task is retries execution on failure.
843
-
844
- temp_dir_root : str, default: None
845
- The root directory under which `current.checkpoint.directory` will be created.
846
- """
847
- ...
848
-
849
- @typing.overload
850
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
851
- """
852
- Decorator prototype for all step decorators. This function gets specialized
853
- and imported for all decorators types by _import_plugin_decorators().
854
- """
855
- ...
856
-
857
- @typing.overload
858
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
859
- ...
860
-
861
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
862
- """
863
- Decorator prototype for all step decorators. This function gets specialized
864
- and imported for all decorators types by _import_plugin_decorators().
889
+ vars : Dict[str, str], default {}
890
+ Dictionary of environment variables to set.
865
891
  """
866
892
  ...
867
893
 
868
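A sketch of `@environment`; the variable name and value are placeholders.

```python
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):

    # The variables below are set before the step body runs; names are examples.
    @environment(vars={"MY_SETTING": "1"})
    @step
    def start(self):
        assert os.environ["MY_SETTING"] == "1"
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```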
- @typing.overload
869
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
894
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
870
895
  """
871
- Specifies a timeout for your step.
872
-
873
- This decorator is useful if this step may hang indefinitely.
874
-
875
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
876
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
877
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
878
-
879
- Note that all the values specified in parameters are added together so if you specify
880
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
896
+ Decorator that helps cache, version, and store models/datasets from Hugging Face Hub.
881
897
 
882
898
 
883
899
  Parameters
884
900
  ----------
885
- seconds : int, default 0
886
- Number of seconds to wait prior to timing out.
887
- minutes : int, default 0
888
- Number of minutes to wait prior to timing out.
889
- hours : int, default 0
890
- Number of hours to wait prior to timing out.
891
- """
892
- ...
893
-
894
- @typing.overload
895
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
896
- ...
897
-
898
- @typing.overload
899
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
900
- ...
901
-
902
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
903
- """
904
- Specifies a timeout for your step.
905
-
906
- This decorator is useful if this step may hang indefinitely.
901
+ temp_dir_root : str, optional
902
+ The root directory that will hold the temporary directory where objects will be downloaded.
907
903
 
908
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
909
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
910
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
904
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
905
+ The list of repos (models/datasets) to load.
911
906
 
912
- Note that all the values specified in parameters are added together so if you specify
913
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
907
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
914
908
 
909
+ - If repo (model/dataset) is not found in the datastore:
910
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
911
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
912
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
915
913
 
916
- Parameters
917
- ----------
918
- seconds : int, default 0
919
- Number of seconds to wait prior to timing out.
920
- minutes : int, default 0
921
- Number of minutes to wait prior to timing out.
922
- hours : int, default 0
923
- Number of hours to wait prior to timing out.
914
+ - If repo is found in the datastore:
915
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
924
916
  """
925
917
  ...
926
918
 
927
919
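A sketch of `@huggingface_hub`, using the `current.huggingface_hub.loaded` accessor mentioned in the docstring above; the repo id is illustrative.

```python
from metaflow import FlowSpec, current, huggingface_hub, step


class HFDemoFlow(FlowSpec):

    # On the first run the repo is pulled from Hugging Face Hub and cached in
    # the Metaflow datastore; later runs load it straight from the datastore.
    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # `loaded` maps the repo id to the local path it was materialized at.
        self.model_path = current.huggingface_hub.loaded["bert-base-uncased"]
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFDemoFlow()
```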
  @typing.overload
928
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
920
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
929
921
  """
930
- Creates a human-readable report, a Metaflow Card, after this step completes.
922
+ Enables loading / saving of models within a step.
931
923
 
932
- Note that you may add multiple `@card` decorators in a step with different parameters.
933
924
 
934
925
 
935
926
  Parameters
936
927
  ----------
937
- type : str, default 'default'
938
- Card type.
939
- id : str, optional, default None
940
- If multiple cards are present, use this id to identify this card.
941
- options : Dict[str, Any], default {}
942
- Options passed to the card. The contents depend on the card type.
943
- timeout : int, default 45
944
- Interrupt reporting if it takes more than this many seconds.
928
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
929
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
930
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
931
+ - `current.checkpoint`
932
+ - `current.model`
933
+ - `current.huggingface_hub`
934
+
935
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
936
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
937
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
938
+
939
+ temp_dir_root : str, default: None
940
+ The root directory under which `current.model.loaded` will store loaded models
945
941
  """
946
942
  ...
947
943
 
948
944
  @typing.overload
949
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
945
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
950
946
  ...
951
947
 
952
948
  @typing.overload
953
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
949
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
954
950
  ...
955
951
 
956
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
952
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
957
953
  """
958
- Creates a human-readable report, a Metaflow Card, after this step completes.
954
+ Enables loading / saving of models within a step.
959
955
 
960
- Note that you may add multiple `@card` decorators in a step with different parameters.
961
956
 
962
957
 
963
958
  Parameters
964
959
  ----------
965
- type : str, default 'default'
966
- Card type.
967
- id : str, optional, default None
968
- If multiple cards are present, use this id to identify this card.
969
- options : Dict[str, Any], default {}
970
- Options passed to the card. The contents depend on the card type.
971
- timeout : int, default 45
972
- Interrupt reporting if it takes more than this many seconds.
960
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
961
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
962
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
963
+ - `current.checkpoint`
964
+ - `current.model`
965
+ - `current.huggingface_hub`
966
+
967
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
968
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
969
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
970
+
971
+ temp_dir_root : str, default: None
972
+ The root directory under which `current.model.loaded` will store loaded models
973
973
  """
974
974
  ...
975
975
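A sketch of loading a previously saved reference with `@model`; the artifact name is illustrative, and the `current.model.loaded[...]` lookup is assumed from the `temp_dir_root` description above rather than spelled out in this stub.

```python
from metaflow import FlowSpec, checkpoint, current, model, step


class ModelLoadDemoFlow(FlowSpec):

    @checkpoint
    @step
    def start(self):
        with open("weights.bin", "wb") as f:
            f.write(b"\x00" * 16)
        # Keep the checkpoint reference as an artifact named `weights_ref`.
        self.weights_ref = current.checkpoint.save("weights.bin")
        self.next(self.train)

    # `load` names the artifact holding the reference; the files are unpacked
    # locally and (assumed accessor) exposed via current.model.loaded.
    @model(load=["weights_ref"])
    @step
    def train(self):
        self.local_dir = current.model.loaded["weights_ref"]
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelLoadDemoFlow()
```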
 
@@ -1016,173 +1016,60 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1016
1016
  """
1017
1017
  ...
1018
1018
 
1019
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1019
+ @typing.overload
1020
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1020
1021
  """
1021
- Allows setting external datastores to save data for the
1022
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1023
-
1024
- This decorator is useful when users wish to save data to a different datastore
1025
- than what is configured in Metaflow. This can be for variety of reasons:
1026
-
1027
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1028
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1029
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1030
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1031
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1032
-
1033
- Usage:
1034
- ----------
1035
-
1036
- - Using a custom IAM role to access the datastore.
1037
-
1038
- ```python
1039
- @with_artifact_store(
1040
- type="s3",
1041
- config=lambda: {
1042
- "root": "s3://my-bucket-foo/path/to/root",
1043
- "role_arn": ROLE,
1044
- },
1045
- )
1046
- class MyFlow(FlowSpec):
1047
-
1048
- @checkpoint
1049
- @step
1050
- def start(self):
1051
- with open("my_file.txt", "w") as f:
1052
- f.write("Hello, World!")
1053
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1054
- self.next(self.end)
1055
-
1056
- ```
1057
-
1058
- - Using credentials to access the s3-compatible datastore.
1059
-
1060
- ```python
1061
- @with_artifact_store(
1062
- type="s3",
1063
- config=lambda: {
1064
- "root": "s3://my-bucket-foo/path/to/root",
1065
- "client_params": {
1066
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1067
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1068
- },
1069
- },
1070
- )
1071
- class MyFlow(FlowSpec):
1072
-
1073
- @checkpoint
1074
- @step
1075
- def start(self):
1076
- with open("my_file.txt", "w") as f:
1077
- f.write("Hello, World!")
1078
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1079
- self.next(self.end)
1080
-
1081
- ```
1082
-
1083
- - Accessing objects stored in external datastores after task execution.
1022
+ Specifies the flow(s) that this flow depends on.
1084
1023
 
1085
- ```python
1086
- run = Run("CheckpointsTestsFlow/8992")
1087
- with artifact_store_from(run=run, config={
1088
- "client_params": {
1089
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1090
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1091
- },
1092
- }):
1093
- with Checkpoint() as cp:
1094
- latest = cp.list(
1095
- task=run["start"].task
1096
- )[0]
1097
- print(latest)
1098
- cp.load(
1099
- latest,
1100
- "test-checkpoints"
1101
- )
1024
+ ```
1025
+ @trigger_on_finish(flow='FooFlow')
1026
+ ```
1027
+ or
1028
+ ```
1029
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1030
+ ```
1031
+ This decorator respects the @project decorator and triggers the flow
1032
+ when upstream runs within the same namespace complete successfully.
1102
1033
 
1103
- task = Task("TorchTuneFlow/8484/train/53673")
1104
- with artifact_store_from(run=run, config={
1105
- "client_params": {
1106
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1107
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1108
- },
1109
- }):
1110
- load_model(
1111
- task.data.model_ref,
1112
- "test-models"
1113
- )
1114
- ```
1115
- Parameters:
1116
- ----------
1034
+ Additionally, you can specify project-aware upstream flow dependencies
1035
+ by specifying the fully qualified project_flow_name.
1036
+ ```
1037
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1038
+ ```
1039
+ or
1040
+ ```
1041
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1042
+ ```
1117
1043
 
1118
- type: str
1119
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1044
+ You can also specify just the project or project branch (other values will be
1045
+ inferred from the current project or project branch):
1046
+ ```
1047
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1048
+ ```
1120
1049
 
1121
- config: dict or Callable
1122
- Dictionary of configuration options for the datastore. The following keys are required:
1123
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1124
- - example: 's3://bucket-name/path/to/root'
1125
- - example: 'gs://bucket-name/path/to/root'
1126
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1127
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1128
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1129
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1130
- """
1131
- ...
1132
-
1133
- @typing.overload
1134
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1135
- """
1136
- Specifies the times when the flow should be run when running on a
1137
- production scheduler.
1050
+ Note that `branch` is typically one of:
1051
+ - `prod`
1052
+ - `user.bob`
1053
+ - `test.my_experiment`
1054
+ - `prod.staging`
1138
1055
 
1139
1056
 
1140
1057
  Parameters
1141
1058
  ----------
1142
- hourly : bool, default False
1143
- Run the workflow hourly.
1144
- daily : bool, default True
1145
- Run the workflow daily.
1146
- weekly : bool, default False
1147
- Run the workflow weekly.
1148
- cron : str, optional, default None
1149
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1150
- specified by this expression.
1151
- timezone : str, optional, default None
1152
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1153
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1059
+ flow : Union[str, Dict[str, str]], optional, default None
1060
+ Upstream flow dependency for this flow.
1061
+ flows : List[Union[str, Dict[str, str]]], default []
1062
+ Upstream flow dependencies for this flow.
1063
+ options : Dict[str, Any], default {}
1064
+ Backend-specific configuration for tuning eventing behavior.
1154
1065
  """
1155
1066
  ...
1156
1067
 
1157
1068
  @typing.overload
1158
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1159
- ...
1160
-
1161
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1162
- """
1163
- Specifies the times when the flow should be run when running on a
1164
- production scheduler.
1165
-
1166
-
1167
- Parameters
1168
- ----------
1169
- hourly : bool, default False
1170
- Run the workflow hourly.
1171
- daily : bool, default True
1172
- Run the workflow daily.
1173
- weekly : bool, default False
1174
- Run the workflow weekly.
1175
- cron : str, optional, default None
1176
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1177
- specified by this expression.
1178
- timezone : str, optional, default None
1179
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1180
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1181
- """
1069
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1182
1070
  ...
1183
1071
 
1184
- @typing.overload
1185
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1072
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1186
1073
  """
1187
1074
  Specifies the flow(s) that this flow depends on.
1188
1075
 
@@ -1230,55 +1117,152 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
1230
1117
  """
1231
1118
  ...
1232
1119
 
1233
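A sketch pairing `@trigger_on_finish` with `@project`, following the usage shown in the docstring above; flow and project names are placeholders.

```python
from metaflow import FlowSpec, project, step, trigger_on_finish


# A deployed version of this flow starts automatically when FooFlow completes
# successfully in the same project/branch namespace; names are placeholders.
@project(name="demo_project")
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```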
- @typing.overload
1234
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1235
- ...
1236
-
1237
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1120
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1121
+ """
1122
+ Specifies what flows belong to the same project.
1123
+
1124
+ A project-specific namespace is created for all flows that
1125
+ use the same `@project(name)`.
1126
+
1127
+
1128
+ Parameters
1129
+ ----------
1130
+ name : str
1131
+ Project name. Make sure that the name is unique amongst all
1132
+ projects that use the same production scheduler. The name may
1133
+ contain only lowercase alphanumeric characters and underscores.
1134
+
1135
+ branch : Optional[str], default None
1136
+ The branch to use. If not specified, the branch is set to
1137
+ `user.<username>` unless `production` is set to `True`. This can
1138
+ also be set on the command line using `--branch` as a top-level option.
1139
+ It is an error to specify `branch` in the decorator and on the command line.
1140
+
1141
+ production : bool, default False
1142
+ Whether or not the branch is the production branch. This can also be set on the
1143
+ command line using `--production` as a top-level option. It is an error to specify
1144
+ `production` in the decorator and on the command line.
1145
+ The project branch name will be:
1146
+ - if `branch` is specified:
1147
+ - if `production` is True: `prod.<branch>`
1148
+ - if `production` is False: `test.<branch>`
1149
+ - if `branch` is not specified:
1150
+ - if `production` is True: `prod`
1151
+ - if `production` is False: `user.<username>`
1152
+ """
1153
+ ...
1154
+
1155
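A sketch of `@project` with an explicit branch; per the rules documented above, leaving `production=False` makes the effective branch `test.feature_x`. Project and branch names are placeholders.

```python
from metaflow import FlowSpec, project, step


# With branch="feature_x" and production left False, the effective project
# branch becomes `test.feature_x` per the rules documented above.
@project(name="demo_project", branch="feature_x")
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```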
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1238
1156
  """
1239
- Specifies the flow(s) that this flow depends on.
1157
+ Allows setting external datastores to save data for the
1158
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1240
1159
 
1241
- ```
1242
- @trigger_on_finish(flow='FooFlow')
1243
- ```
1244
- or
1245
- ```
1246
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1247
- ```
1248
- This decorator respects the @project decorator and triggers the flow
1249
- when upstream runs within the same namespace complete successfully
1160
+ This decorator is useful when users wish to save data to a different datastore
1161
+ than what is configured in Metaflow. This can be for variety of reasons:
1250
1162
 
1251
- Additionally, you can specify project aware upstream flow dependencies
1252
- by specifying the fully qualified project_flow_name.
1253
- ```
1254
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1255
- ```
1256
- or
1257
- ```
1258
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1259
- ```
1163
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1164
+ 2. Data Locality: The location where the task is executing is not in the same region as the datastore.
1165
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1166
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1167
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1260
1168
 
1261
- You can also specify just the project or project branch (other values will be
1262
- inferred from the current project or project branch):
1263
- ```
1264
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1265
- ```
1169
+ Usage:
1170
+ ----------
1266
1171
 
1267
- Note that `branch` is typically one of:
1268
- - `prod`
1269
- - `user.bob`
1270
- - `test.my_experiment`
1271
- - `prod.staging`
1172
+ - Using a custom IAM role to access the datastore.
1272
1173
 
1174
+ ```python
1175
+ @with_artifact_store(
1176
+ type="s3",
1177
+ config=lambda: {
1178
+ "root": "s3://my-bucket-foo/path/to/root",
1179
+ "role_arn": ROLE,
1180
+ },
1181
+ )
1182
+ class MyFlow(FlowSpec):
1273
1183
 
1274
- Parameters
1184
+ @checkpoint
1185
+ @step
1186
+ def start(self):
1187
+ with open("my_file.txt", "w") as f:
1188
+ f.write("Hello, World!")
1189
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1190
+ self.next(self.end)
1191
+
1192
+ ```
1193
+
1194
+ - Using credentials to access the s3-compatible datastore.
1195
+
1196
+ ```python
1197
+ @with_artifact_store(
1198
+ type="s3",
1199
+ config=lambda: {
1200
+ "root": "s3://my-bucket-foo/path/to/root",
1201
+ "client_params": {
1202
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1203
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1204
+ },
1205
+ },
1206
+ )
1207
+ class MyFlow(FlowSpec):
1208
+
1209
+ @checkpoint
1210
+ @step
1211
+ def start(self):
1212
+ with open("my_file.txt", "w") as f:
1213
+ f.write("Hello, World!")
1214
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1215
+ self.next(self.end)
1216
+
1217
+ ```
1218
+
1219
+ - Accessing objects stored in external datastores after task execution.
1220
+
1221
+ ```python
1222
+ run = Run("CheckpointsTestsFlow/8992")
1223
+ with artifact_store_from(run=run, config={
1224
+ "client_params": {
1225
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1226
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1227
+ },
1228
+ }):
1229
+ with Checkpoint() as cp:
1230
+ latest = cp.list(
1231
+ task=run["start"].task
1232
+ )[0]
1233
+ print(latest)
1234
+ cp.load(
1235
+ latest,
1236
+ "test-checkpoints"
1237
+ )
1238
+
1239
+ task = Task("TorchTuneFlow/8484/train/53673")
1240
+ with artifact_store_from(run=run, config={
1241
+ "client_params": {
1242
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1243
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1244
+ },
1245
+ }):
1246
+ load_model(
1247
+ task.data.model_ref,
1248
+ "test-models"
1249
+ )
1250
+ ```
1251
+ Parameters:
1275
1252
  ----------
1276
- flow : Union[str, Dict[str, str]], optional, default None
1277
- Upstream flow dependency for this flow.
1278
- flows : List[Union[str, Dict[str, str]]], default []
1279
- Upstream flow dependencies for this flow.
1280
- options : Dict[str, Any], default {}
1281
- Backend-specific configuration for tuning eventing behavior.
1253
+
1254
+ type: str
1255
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure', or any other supported Metaflow datastore.
1256
+
1257
+ config: dict or Callable
1258
+ Dictionary of configuration options for the datastore. The following keys are required:
1259
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1260
+ - example: 's3://bucket-name/path/to/root'
1261
+ - example: 'gs://bucket-name/path/to/root'
1262
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1263
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1264
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1265
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1282
1266
  """
1283
1267
  ...
1284
1268
 
@@ -1376,43 +1360,53 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1376
1360
  ...
1377
1361
 
1378
1362
  @typing.overload
1379
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1363
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1380
1364
  """
1381
- Specifies the PyPI packages for all steps of the flow.
1365
+ Specifies the times when the flow should be run when running on a
1366
+ production scheduler.
1382
1367
 
1383
- Use `@pypi_base` to set common packages required by all
1384
- steps and use `@pypi` to specify step-specific overrides.
1385
1368
 
1386
1369
  Parameters
1387
1370
  ----------
1388
- packages : Dict[str, str], default: {}
1389
- Packages to use for this flow. The key is the name of the package
1390
- and the value is the version to use.
1391
- python : str, optional, default: None
1392
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1393
- that the version used will correspond to the version of the Python interpreter used to start the run.
1371
+ hourly : bool, default False
1372
+ Run the workflow hourly.
1373
+ daily : bool, default True
1374
+ Run the workflow daily.
1375
+ weekly : bool, default False
1376
+ Run the workflow weekly.
1377
+ cron : str, optional, default None
1378
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1379
+ specified by this expression.
1380
+ timezone : str, optional, default None
1381
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1382
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1394
1383
  """
1395
1384
  ...
1396
1385
 
1397
1386
  @typing.overload
1398
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1387
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1399
1388
  ...
1400
1389
 
1401
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1390
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1402
1391
  """
1403
- Specifies the PyPI packages for all steps of the flow.
1392
+ Specifies the times when the flow should be run when running on a
1393
+ production scheduler.
1404
1394
 
1405
- Use `@pypi_base` to set common packages required by all
1406
- steps and use `@pypi` to specify step-specific overrides.
1407
1395
 
1408
1396
  Parameters
1409
1397
  ----------
1410
- packages : Dict[str, str], default: {}
1411
- Packages to use for this flow. The key is the name of the package
1412
- and the value is the version to use.
1413
- python : str, optional, default: None
1414
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1415
- that the version used will correspond to the version of the Python interpreter used to start the run.
1398
+ hourly : bool, default False
1399
+ Run the workflow hourly.
1400
+ daily : bool, default True
1401
+ Run the workflow daily.
1402
+ weekly : bool, default False
1403
+ Run the workflow weekly.
1404
+ cron : str, optional, default None
1405
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1406
+ specified by this expression.
1407
+ timezone : str, optional, default None
1408
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1409
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1416
1410
  """
1417
1411
  ...
1418
1412
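A sketch of `@schedule` with a cron expression and timezone, as documented above; the schedule itself is illustrative.

```python
from metaflow import FlowSpec, schedule, step


# Runs daily at 03:00 when deployed to a production scheduler; the cron string
# and timezone are examples only.
@schedule(cron="0 3 * * *", timezone="Europe/Helsinki")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```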
 
@@ -1510,38 +1504,44 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1510
1504
  """
1511
1505
  ...
1512
1506
 
1513
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1507
+ @typing.overload
1508
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1514
1509
  """
1515
- Specifies what flows belong to the same project.
1516
-
1517
- A project-specific namespace is created for all flows that
1518
- use the same `@project(name)`.
1510
+ Specifies the PyPI packages for all steps of the flow.
1519
1511
 
1512
+ Use `@pypi_base` to set common packages required by all
1513
+ steps and use `@pypi` to specify step-specific overrides.
1520
1514
 
1521
1515
  Parameters
1522
1516
  ----------
1523
- name : str
1524
- Project name. Make sure that the name is unique amongst all
1525
- projects that use the same production scheduler. The name may
1526
- contain only lowercase alphanumeric characters and underscores.
1517
+ packages : Dict[str, str], default: {}
1518
+ Packages to use for this flow. The key is the name of the package
1519
+ and the value is the version to use.
1520
+ python : str, optional, default: None
1521
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1522
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1523
+ """
1524
+ ...
1525
+
1526
+ @typing.overload
1527
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1528
+ ...
1529
+
1530
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1531
+ """
1532
+ Specifies the PyPI packages for all steps of the flow.
1527
1533
 
1528
- branch : Optional[str], default None
1529
- The branch to use. If not specified, the branch is set to
1530
- `user.<username>` unless `production` is set to `True`. This can
1531
- also be set on the command line using `--branch` as a top-level option.
1532
- It is an error to specify `branch` in the decorator and on the command line.
1534
+ Use `@pypi_base` to set common packages required by all
1535
+ steps and use `@pypi` to specify step-specific overrides.
1533
1536
 
1534
- production : bool, default False
1535
- Whether or not the branch is the production branch. This can also be set on the
1536
- command line using `--production` as a top-level option. It is an error to specify
1537
- `production` in the decorator and on the command line.
1538
- The project branch name will be:
1539
- - if `branch` is specified:
1540
- - if `production` is True: `prod.<branch>`
1541
- - if `production` is False: `test.<branch>`
1542
- - if `branch` is not specified:
1543
- - if `production` is True: `prod`
1544
- - if `production` is False: `user.<username>`
1537
+ Parameters
1538
+ ----------
1539
+ packages : Dict[str, str], default: {}
1540
+ Packages to use for this flow. The key is the name of the package
1541
+ and the value is the version to use.
1542
+ python : str, optional, default: None
1543
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1544
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1545
1545
  """
1546
1546
  ...
1547
1547
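A sketch combining flow-level `@pypi_base` with a step-level `@pypi` override, as the docstring above suggests; package names and versions are placeholders.

```python
from metaflow import FlowSpec, pypi, pypi_base, step


# Flow-wide packages; the step-level @pypi below augments/overrides them.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.0")
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"scikit-learn": "1.5.0"})
    @step
    def start(self):
        import sklearn  # available thanks to the step-level override
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```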