ob-metaflow-stubs 6.0.3.188rc0__py2.py3-none-any.whl → 6.0.3.188rc1__py2.py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (239)
  1. metaflow-stubs/__init__.pyi +752 -752
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +35 -35
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/cli_to_config.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/secrets.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/validations.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  111. metaflow-stubs/multicore_utils.pyi +1 -1
  112. metaflow-stubs/ob_internal.pyi +1 -1
  113. metaflow-stubs/parameters.pyi +3 -3
  114. metaflow-stubs/plugins/__init__.pyi +11 -11
  115. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  116. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  117. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  118. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  119. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  120. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  121. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  122. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  123. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  124. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  125. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  126. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
  127. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  128. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  129. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  131. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  132. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  133. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  134. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  135. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  136. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  137. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  138. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  140. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  141. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  142. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  144. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  145. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  146. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  147. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  148. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  149. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  150. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  151. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  152. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  153. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  154. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  155. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  156. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  157. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  159. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  160. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  161. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  162. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  163. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  164. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  165. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  166. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  167. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  168. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  169. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  170. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  171. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  172. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  173. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  174. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  175. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  176. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  177. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  180. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  182. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  183. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  184. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  185. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  186. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  187. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  188. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  189. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  190. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  191. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  192. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  193. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  195. metaflow-stubs/plugins/perimeters.pyi +1 -1
  196. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  197. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  198. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  199. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  200. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  201. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  202. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  203. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  204. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  206. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  207. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  208. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  209. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  210. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  211. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  213. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  214. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  215. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  216. metaflow-stubs/profilers/__init__.pyi +1 -1
  217. metaflow-stubs/pylint_wrapper.pyi +1 -1
  218. metaflow-stubs/runner/__init__.pyi +1 -1
  219. metaflow-stubs/runner/deployer.pyi +28 -28
  220. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  221. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  222. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  223. metaflow-stubs/runner/nbrun.pyi +1 -1
  224. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  225. metaflow-stubs/runner/utils.pyi +2 -2
  226. metaflow-stubs/system/__init__.pyi +1 -1
  227. metaflow-stubs/system/system_logger.pyi +1 -1
  228. metaflow-stubs/system/system_monitor.pyi +1 -1
  229. metaflow-stubs/tagging_util.pyi +1 -1
  230. metaflow-stubs/tuple_util.pyi +1 -1
  231. metaflow-stubs/user_configs/__init__.pyi +1 -1
  232. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  233. metaflow-stubs/user_configs/config_options.pyi +3 -3
  234. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  235. {ob_metaflow_stubs-6.0.3.188rc0.dist-info → ob_metaflow_stubs-6.0.3.188rc1.dist-info}/METADATA +1 -1
  236. ob_metaflow_stubs-6.0.3.188rc1.dist-info/RECORD +239 -0
  237. ob_metaflow_stubs-6.0.3.188rc0.dist-info/RECORD +0 -239
  238. {ob_metaflow_stubs-6.0.3.188rc0.dist-info → ob_metaflow_stubs-6.0.3.188rc1.dist-info}/WHEEL +0 -0
  239. {ob_metaflow_stubs-6.0.3.188rc0.dist-info → ob_metaflow_stubs-6.0.3.188rc1.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.18.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-03T01:34:48.431701 #
+ # Generated on 2025-07-07T22:26:05.548812 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import metaflow_git as metaflow_git
- from . import cards as cards
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
+ from . import cards as cards
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -155,196 +155,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
155
155
  """
156
156
  ...
157
157
 
158
- @typing.overload
159
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
160
- """
161
- Specifies the Conda environment for the step.
162
-
163
- Information in this decorator will augment any
164
- attributes set in the `@conda_base` flow-level decorator. Hence,
165
- you can use `@conda_base` to set packages required by all
166
- steps and use `@conda` to specify step-specific overrides.
167
-
168
-
169
- Parameters
170
- ----------
171
- packages : Dict[str, str], default {}
172
- Packages to use for this step. The key is the name of the package
173
- and the value is the version to use.
174
- libraries : Dict[str, str], default {}
175
- Supported for backward compatibility. When used with packages, packages will take precedence.
176
- python : str, optional, default None
177
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
178
- that the version used will correspond to the version of the Python interpreter used to start the run.
179
- disabled : bool, default False
180
- If set to True, disables @conda.
181
- """
182
- ...
183
-
184
- @typing.overload
185
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
186
- ...
187
-
188
- @typing.overload
189
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
190
- ...
191
-
192
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
193
- """
194
- Specifies the Conda environment for the step.
195
-
196
- Information in this decorator will augment any
197
- attributes set in the `@conda_base` flow-level decorator. Hence,
198
- you can use `@conda_base` to set packages required by all
199
- steps and use `@conda` to specify step-specific overrides.
200
-
201
-
202
- Parameters
203
- ----------
204
- packages : Dict[str, str], default {}
205
- Packages to use for this step. The key is the name of the package
206
- and the value is the version to use.
207
- libraries : Dict[str, str], default {}
208
- Supported for backward compatibility. When used with packages, packages will take precedence.
209
- python : str, optional, default None
210
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
211
- that the version used will correspond to the version of the Python interpreter used to start the run.
212
- disabled : bool, default False
213
- If set to True, disables @conda.
214
- """
215
- ...
216
-
217
- @typing.overload
218
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
219
- """
220
- Decorator prototype for all step decorators. This function gets specialized
221
- and imported for all decorators types by _import_plugin_decorators().
222
- """
223
- ...
224
-
225
- @typing.overload
226
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
227
- ...
228
-
229
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
230
- """
231
- Decorator prototype for all step decorators. This function gets specialized
232
- and imported for all decorators types by _import_plugin_decorators().
233
- """
234
- ...
235
-
236
- @typing.overload
237
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
238
- """
239
- Specifies the number of times the task corresponding
240
- to a step needs to be retried.
241
-
242
- This decorator is useful for handling transient errors, such as networking issues.
243
- If your task contains operations that can't be retried safely, e.g. database updates,
244
- it is advisable to annotate it with `@retry(times=0)`.
245
-
246
- This can be used in conjunction with the `@catch` decorator. The `@catch`
247
- decorator will execute a no-op task after all retries have been exhausted,
248
- ensuring that the flow execution can continue.
249
-
250
-
251
- Parameters
252
- ----------
253
- times : int, default 3
254
- Number of times to retry this task.
255
- minutes_between_retries : int, default 2
256
- Number of minutes between retries.
257
- """
258
- ...
259
-
260
- @typing.overload
261
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
262
- ...
263
-
264
- @typing.overload
265
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
266
- ...
267
-
268
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
269
- """
270
- Specifies the number of times the task corresponding
271
- to a step needs to be retried.
272
-
273
- This decorator is useful for handling transient errors, such as networking issues.
274
- If your task contains operations that can't be retried safely, e.g. database updates,
275
- it is advisable to annotate it with `@retry(times=0)`.
276
-
277
- This can be used in conjunction with the `@catch` decorator. The `@catch`
278
- decorator will execute a no-op task after all retries have been exhausted,
279
- ensuring that the flow execution can continue.
280
-
281
-
282
- Parameters
283
- ----------
284
- times : int, default 3
285
- Number of times to retry this task.
286
- minutes_between_retries : int, default 2
287
- Number of minutes between retries.
288
- """
289
- ...
290
-
291
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
292
- """
293
- Specifies that this step should execute on DGX cloud.
294
-
295
-
296
- Parameters
297
- ----------
298
- gpu : int
299
- Number of GPUs to use.
300
- gpu_type : str
301
- Type of Nvidia GPU to use.
302
- """
303
- ...
304
-
305
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
306
- """
307
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
308
-
309
- User code call
310
- --------------
311
- @ollama(
312
- models=[...],
313
- ...
314
- )
315
-
316
- Valid backend options
317
- ---------------------
318
- - 'local': Run as a separate process on the local task machine.
319
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
320
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
321
-
322
- Valid model options
323
- -------------------
324
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
325
-
326
-
327
- Parameters
328
- ----------
329
- models: list[str]
330
- List of Ollama containers running models in sidecars.
331
- backend: str
332
- Determines where and how to run the Ollama process.
333
- force_pull: bool
334
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
335
- cache_update_policy: str
336
- Cache update policy: "auto", "force", or "never".
337
- force_cache_update: bool
338
- Simple override for "force" cache update policy.
339
- debug: bool
340
- Whether to turn on verbose debugging logs.
341
- circuit_breaker_config: dict
342
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
343
- timeout_config: dict
344
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
345
- """
346
- ...
347
-
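
For orientation, a minimal sketch of how the `@ollama` decorator documented above might be attached to a step. It assumes the ob-metaflow distribution exposes `ollama` as a top-level import, as this stub file indicates; the flow name, the model list, and the assumption that the remaining options fall back to runtime defaults are illustrative rather than taken from this package.

```python
from metaflow import FlowSpec, step, ollama  # `ollama` assumed importable from the top level


class OllamaSidecarFlow(FlowSpec):  # hypothetical flow name

    @ollama(models=["llama3.2"], backend="local")  # remaining options assumed to have runtime defaults
    @step
    def start(self):
        # The decorator runs an Ollama sidecar serving the listed models for
        # the duration of this step; user code would call the local API here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaSidecarFlow()
```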
348
158
  @typing.overload
349
159
  def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
350
160
  """
@@ -474,83 +284,384 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
474
284
  """
475
285
  ...
476
286
 
477
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
287
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
478
288
  """
479
- Decorator that helps cache, version and store models/datasets from huggingface hub.
480
-
481
- > Examples
482
-
483
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
484
- ```python
485
- @huggingface_hub
486
- @step
487
- def pull_model_from_huggingface(self):
488
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
489
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
490
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
491
- # value of the function is a reference to the model in the backend storage.
492
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
289
+ Specifies that this step should execute on DGX cloud.
493
290
 
494
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
495
- self.llama_model = current.huggingface_hub.snapshot_download(
496
- repo_id=self.model_id,
497
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
498
- )
499
- self.next(self.train)
500
- ```
501
291
 
502
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
503
- ```python
504
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
505
- @step
506
- def pull_model_from_huggingface(self):
507
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
508
- ```
292
+ Parameters
293
+ ----------
294
+ gpu : int
295
+ Number of GPUs to use.
296
+ gpu_type : str
297
+ Type of Nvidia GPU to use.
298
+ """
299
+ ...
300
+
301
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
302
+ """
303
+ Specifies that this step should execute on DGX cloud.
509
304
 
510
- ```python
511
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
512
- @step
513
- def finetune_model(self):
514
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
515
- # path_to_model will be /my-directory
516
- ```
517
305
 
518
- ```python
519
- # Takes all the arguments passed to `snapshot_download`
520
- # except for `local_dir`
521
- @huggingface_hub(load=[
522
- {
523
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
524
- },
525
- {
526
- "repo_id": "myorg/mistral-lora",
527
- "repo_type": "model",
528
- },
529
- ])
530
- @step
531
- def finetune_model(self):
532
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
533
- # path_to_model will be /my-directory
534
- ```
306
+ Parameters
307
+ ----------
308
+ gpu : int
309
+ Number of GPUs to use.
310
+ gpu_type : str
311
+ Type of Nvidia GPU to use.
312
+ queue_timeout : int
313
+ Time to keep the job in NVCF's queue.
314
+ """
315
+ ...
316
+
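
A minimal sketch of applying the `@nvidia` decorator stubbed above, assuming the ob-metaflow distribution exposes it as a top-level import as this stub suggests; the flow name and the GPU count, GPU type, and queue timeout are illustrative values only.

```python
from metaflow import FlowSpec, step, nvidia  # `nvidia` assumed importable from the top level


class DgxCloudFlow(FlowSpec):  # hypothetical flow name

    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)  # illustrative values
    @step
    def start(self):
        # Per the docstring above, this step is scheduled on DGX cloud.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DgxCloudFlow()
```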
317
+ @typing.overload
318
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
319
+ """
320
+ Specifies the PyPI packages for the step.
321
+
322
+ Information in this decorator will augment any
323
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
324
+ you can use `@pypi_base` to set packages required by all
325
+ steps and use `@pypi` to specify step-specific overrides.
535
326
 
536
327
 
537
328
  Parameters
538
329
  ----------
539
- temp_dir_root : str, optional
540
- The root directory that will hold the temporary directory where objects will be downloaded.
330
+ packages : Dict[str, str], default: {}
331
+ Packages to use for this step. The key is the name of the package
332
+ and the value is the version to use.
333
+ python : str, optional, default: None
334
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
335
+ that the version used will correspond to the version of the Python interpreter used to start the run.
336
+ """
337
+ ...
338
+
339
+ @typing.overload
340
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
341
+ ...
342
+
343
+ @typing.overload
344
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
345
+ ...
346
+
347
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
348
+ """
349
+ Specifies the PyPI packages for the step.
541
350
 
542
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
543
- The list of repos (models/datasets) to load.
351
+ Information in this decorator will augment any
352
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
353
+ you can use `@pypi_base` to set packages required by all
354
+ steps and use `@pypi` to specify step-specific overrides.
544
355
 
545
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
546
356
 
547
- - If repo (model/dataset) is not found in the datastore:
548
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
549
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
550
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
357
+ Parameters
358
+ ----------
359
+ packages : Dict[str, str], default: {}
360
+ Packages to use for this step. The key is the name of the package
361
+ and the value is the version to use.
362
+ python : str, optional, default: None
363
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
364
+ that the version used will correspond to the version of the Python interpreter used to start the run.
365
+ """
366
+ ...
367
+
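
A minimal sketch of the `@pypi` decorator described above, pinning one package for a single step; the flow name and the version pins are illustrative.

```python
from metaflow import FlowSpec, step, pypi


class PypiStepFlow(FlowSpec):  # hypothetical flow name

    @pypi(packages={"pandas": "2.2.2"}, python="3.11.5")  # illustrative pins
    @step
    def start(self):
        import pandas as pd  # resolved inside the step's isolated environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiStepFlow()
```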
368
+ @typing.overload
369
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
370
+ """
371
+ Specifies that the step will succeed under all circumstances.
551
372
 
552
- - If repo is found in the datastore:
553
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
373
+ The decorator will create an optional artifact, specified by `var`, which
374
+ contains the exception raised. You can use it to detect the presence
375
+ of errors, indicating that all happy-path artifacts produced by the step
376
+ are missing.
377
+
378
+
379
+ Parameters
380
+ ----------
381
+ var : str, optional, default None
382
+ Name of the artifact in which to store the caught exception.
383
+ If not specified, the exception is not stored.
384
+ print_exception : bool, default True
385
+ Determines whether or not the exception is printed to
386
+ stdout when caught.
387
+ """
388
+ ...
389
+
390
+ @typing.overload
391
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
392
+ ...
393
+
394
+ @typing.overload
395
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
396
+ ...
397
+
398
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
399
+ """
400
+ Specifies that the step will succeed under all circumstances.
401
+
402
+ The decorator will create an optional artifact, specified by `var`, which
403
+ contains the exception raised. You can use it to detect the presence
404
+ of errors, indicating that all happy-path artifacts produced by the step
405
+ are missing.
406
+
407
+
408
+ Parameters
409
+ ----------
410
+ var : str, optional, default None
411
+ Name of the artifact in which to store the caught exception.
412
+ If not specified, the exception is not stored.
413
+ print_exception : bool, default True
414
+ Determines whether or not the exception is printed to
415
+ stdout when caught.
416
+ """
417
+ ...
418
+
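
A minimal sketch combining `@catch` with `@retry`, as the docstrings above suggest; the flow name and artifact name are illustrative, and the catch artifact is read defensively since it only matters on the failure path.

```python
from metaflow import FlowSpec, step, catch, retry


class CatchDemoFlow(FlowSpec):  # hypothetical flow name

    @catch(var="start_failure", print_exception=True)
    @retry(times=2)
    @step
    def start(self):
        self.value = 1 / 0  # raises; once retries are exhausted, @catch records the exception
        self.next(self.end)

    @step
    def end(self):
        # Happy-path artifacts (self.value) are missing when start failed,
        # so check the catch artifact before using them.
        if getattr(self, "start_failure", None):
            print("start failed:", self.start_failure)


if __name__ == "__main__":
    CatchDemoFlow()
```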
419
+ @typing.overload
420
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
421
+ """
422
+ Specifies the Conda environment for the step.
423
+
424
+ Information in this decorator will augment any
425
+ attributes set in the `@conda_base` flow-level decorator. Hence,
426
+ you can use `@conda_base` to set packages required by all
427
+ steps and use `@conda` to specify step-specific overrides.
428
+
429
+
430
+ Parameters
431
+ ----------
432
+ packages : Dict[str, str], default {}
433
+ Packages to use for this step. The key is the name of the package
434
+ and the value is the version to use.
435
+ libraries : Dict[str, str], default {}
436
+ Supported for backward compatibility. When used with packages, packages will take precedence.
437
+ python : str, optional, default None
438
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
439
+ that the version used will correspond to the version of the Python interpreter used to start the run.
440
+ disabled : bool, default False
441
+ If set to True, disables @conda.
442
+ """
443
+ ...
444
+
445
+ @typing.overload
446
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
447
+ ...
448
+
449
+ @typing.overload
450
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
451
+ ...
452
+
453
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
454
+ """
455
+ Specifies the Conda environment for the step.
456
+
457
+ Information in this decorator will augment any
458
+ attributes set in the `@conda_base` flow-level decorator. Hence,
459
+ you can use `@conda_base` to set packages required by all
460
+ steps and use `@conda` to specify step-specific overrides.
461
+
462
+
463
+ Parameters
464
+ ----------
465
+ packages : Dict[str, str], default {}
466
+ Packages to use for this step. The key is the name of the package
467
+ and the value is the version to use.
468
+ libraries : Dict[str, str], default {}
469
+ Supported for backward compatibility. When used with packages, packages will take precedence.
470
+ python : str, optional, default None
471
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
472
+ that the version used will correspond to the version of the Python interpreter used to start the run.
473
+ disabled : bool, default False
474
+ If set to True, disables @conda.
475
+ """
476
+ ...
477
+
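
A minimal sketch of `@conda` layered on `@conda_base`, matching the override behavior described above; the package and Python versions are illustrative.

```python
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(python="3.11.5")  # flow-wide default; steps may override or extend it
class CondaDemoFlow(FlowSpec):  # hypothetical flow name

    @conda(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def start(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```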
478
+ @typing.overload
479
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
480
+ """
481
+ Specifies the number of times the task corresponding
482
+ to a step needs to be retried.
483
+
484
+ This decorator is useful for handling transient errors, such as networking issues.
485
+ If your task contains operations that can't be retried safely, e.g. database updates,
486
+ it is advisable to annotate it with `@retry(times=0)`.
487
+
488
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
489
+ decorator will execute a no-op task after all retries have been exhausted,
490
+ ensuring that the flow execution can continue.
491
+
492
+
493
+ Parameters
494
+ ----------
495
+ times : int, default 3
496
+ Number of times to retry this task.
497
+ minutes_between_retries : int, default 2
498
+ Number of minutes between retries.
499
+ """
500
+ ...
501
+
502
+ @typing.overload
503
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
504
+ ...
505
+
506
+ @typing.overload
507
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
508
+ ...
509
+
510
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
511
+ """
512
+ Specifies the number of times the task corresponding
513
+ to a step needs to be retried.
514
+
515
+ This decorator is useful for handling transient errors, such as networking issues.
516
+ If your task contains operations that can't be retried safely, e.g. database updates,
517
+ it is advisable to annotate it with `@retry(times=0)`.
518
+
519
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
520
+ decorator will execute a no-op task after all retries have been exhausted,
521
+ ensuring that the flow execution can continue.
522
+
523
+
524
+ Parameters
525
+ ----------
526
+ times : int, default 3
527
+ Number of times to retry this task.
528
+ minutes_between_retries : int, default 2
529
+ Number of minutes between retries.
530
+ """
531
+ ...
532
+
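
A short sketch of the `@retry(times=0)` pattern recommended above for steps that are not safe to re-run; the flow name and the database write it stands in for are hypothetical.

```python
from metaflow import FlowSpec, retry, step


class NonIdempotentFlow(FlowSpec):  # hypothetical flow name

    @retry(times=0)  # opt out of retries for a step with external side effects
    @step
    def start(self):
        # A non-retry-safe operation (e.g. a database update) would go here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NonIdempotentFlow()
```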
533
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
534
+ """
535
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
536
+
537
+ User code call
538
+ --------------
539
+ @vllm(
540
+ model="...",
541
+ ...
542
+ )
543
+
544
+ Valid backend options
545
+ ---------------------
546
+ - 'local': Run as a separate process on the local task machine.
547
+
548
+ Valid model options
549
+ -------------------
550
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
551
+
552
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
553
+ If you need multiple models, you must create multiple @vllm decorators.
554
+
555
+
556
+ Parameters
557
+ ----------
558
+ model: str
559
+ HuggingFace model identifier to be served by vLLM.
560
+ backend: str
561
+ Determines where and how to run the vLLM process.
562
+ openai_api_server: bool
563
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
564
+ Default is False (uses native engine).
565
+ Set to True for backward compatibility with existing code.
566
+ debug: bool
567
+ Whether to turn on verbose debugging logs.
568
+ card_refresh_interval: int
569
+ Interval in seconds for refreshing the vLLM status card.
570
+ Only used when openai_api_server=True.
571
+ max_retries: int
572
+ Maximum number of retries checking for vLLM server startup.
573
+ Only used when openai_api_server=True.
574
+ retry_alert_frequency: int
575
+ Frequency of alert logs for vLLM server startup retries.
576
+ Only used when openai_api_server=True.
577
+ engine_args : dict
578
+ Additional keyword arguments to pass to the vLLM engine.
579
+ For example, `tensor_parallel_size=2`.
580
+ """
581
+ ...
582
+
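
A minimal sketch of the `@vllm` decorator documented above, serving a single model as a sidecar. It assumes the ob-metaflow distribution exposes `vllm` at the top level as this stub indicates; the model id, flag values, and engine arguments are illustrative, and every parameter is passed explicitly because the stub lists them without defaults.

```python
from metaflow import FlowSpec, step, vllm  # `vllm` assumed importable from the top level


class VllmServeFlow(FlowSpec):  # hypothetical flow name

    @vllm(
        model="meta-llama/Llama-3.2-1B",  # one model per decorator, per the note above
        backend="local",
        openai_api_server=False,          # native engine mode
        debug=False,
        card_refresh_interval=10,         # illustrative; only used in API-server mode
        max_retries=60,                   # illustrative; only used in API-server mode
        retry_alert_frequency=5,          # illustrative; only used in API-server mode
        engine_args={"tensor_parallel_size": 1},
    )
    @step
    def start(self):
        # The vLLM sidecar is available for the duration of this step.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VllmServeFlow()
```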
583
+ @typing.overload
584
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
585
+ """
586
+ Decorator prototype for all step decorators. This function gets specialized
587
+ and imported for all decorator types by _import_plugin_decorators().
588
+ """
589
+ ...
590
+
591
+ @typing.overload
592
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
593
+ ...
594
+
595
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
596
+ """
597
+ Decorator prototype for all step decorators. This function gets specialized
598
+ and imported for all decorator types by _import_plugin_decorators().
599
+ """
600
+ ...
601
+
602
+ @typing.overload
603
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
604
+ """
605
+ Creates a human-readable report, a Metaflow Card, after this step completes.
606
+
607
+ Note that you may add multiple `@card` decorators in a step with different parameters.
608
+
609
+
610
+ Parameters
611
+ ----------
612
+ type : str, default 'default'
613
+ Card type.
614
+ id : str, optional, default None
615
+ If multiple cards are present, use this id to identify this card.
616
+ options : Dict[str, Any], default {}
617
+ Options passed to the card. The contents depend on the card type.
618
+ timeout : int, default 45
619
+ Interrupt reporting if it takes more than this many seconds.
620
+ """
621
+ ...
622
+
623
+ @typing.overload
624
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
625
+ ...
626
+
627
+ @typing.overload
628
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
629
+ ...
630
+
631
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
632
+ """
633
+ Creates a human-readable report, a Metaflow Card, after this step completes.
634
+
635
+ Note that you may add multiple `@card` decorators in a step with different parameters.
636
+
637
+
638
+ Parameters
639
+ ----------
640
+ type : str, default 'default'
641
+ Card type.
642
+ id : str, optional, default None
643
+ If multiple cards are present, use this id to identify this card.
644
+ options : Dict[str, Any], default {}
645
+ Options passed to the card. The contents depend on the card type.
646
+ timeout : int, default 45
647
+ Interrupt reporting if it takes more than this many seconds.
648
+ """
649
+ ...
650
+
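
A minimal sketch of `@card` as documented above, appending a Markdown component from inside the step via `current.card`; the flow name and report content are illustrative.

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):  # hypothetical flow name

    @card(type="default", id="report", timeout=45)
    @step
    def start(self):
        # Components appended here are rendered into the card once the step completes.
        current.card.append(Markdown("# Run summary\n- status: ok"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```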
651
+ @typing.overload
652
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
653
+ """
654
+ Internal decorator to support Fast bakery
655
+ """
656
+ ...
657
+
658
+ @typing.overload
659
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
660
+ ...
661
+
662
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
663
+ """
664
+ Internal decorator to support Fast bakery
554
665
  """
555
666
  ...
556
667
 
@@ -668,117 +779,36 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
668
779
  ```python
669
780
  @retry(times=3)
670
781
  @checkpoint
671
- @step
672
- def train(self):
673
- # Assume that the task has restarted and the previous attempt of the task
674
- # saved a checkpoint
675
- checkpoint_path = None
676
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
677
- print("Loaded checkpoint from the previous attempt")
678
- checkpoint_path = current.checkpoint.directory
679
-
680
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
681
- for i in range(self.epochs):
682
- ...
683
- ```
684
-
685
-
686
- Parameters
687
- ----------
688
- load_policy : str, default: "fresh"
689
- The policy for loading the checkpoint. The following policies are supported:
690
- - "eager": Loads the the latest available checkpoint within the namespace.
691
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
692
- will be loaded at the start of the task.
693
- - "none": Do not load any checkpoint
694
- - "fresh": Loads the lastest checkpoint created within the running Task.
695
- This mode helps loading checkpoints across various retry attempts of the same task.
696
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
697
- created within the task will be loaded when the task is retries execution on failure.
698
-
699
- temp_dir_root : str, default: None
700
- The root directory under which `current.checkpoint.directory` will be created.
701
- """
702
- ...
703
-
704
- @typing.overload
705
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
706
- """
707
- Creates a human-readable report, a Metaflow Card, after this step completes.
708
-
709
- Note that you may add multiple `@card` decorators in a step with different parameters.
710
-
711
-
712
- Parameters
713
- ----------
714
- type : str, default 'default'
715
- Card type.
716
- id : str, optional, default None
717
- If multiple cards are present, use this id to identify this card.
718
- options : Dict[str, Any], default {}
719
- Options passed to the card. The contents depend on the card type.
720
- timeout : int, default 45
721
- Interrupt reporting if it takes more than this many seconds.
722
- """
723
- ...
724
-
725
- @typing.overload
726
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
727
- ...
728
-
729
- @typing.overload
730
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
731
- ...
732
-
733
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
734
- """
735
- Creates a human-readable report, a Metaflow Card, after this step completes.
736
-
737
- Note that you may add multiple `@card` decorators in a step with different parameters.
738
-
739
-
740
- Parameters
741
- ----------
742
- type : str, default 'default'
743
- Card type.
744
- id : str, optional, default None
745
- If multiple cards are present, use this id to identify this card.
746
- options : Dict[str, Any], default {}
747
- Options passed to the card. The contents depend on the card type.
748
- timeout : int, default 45
749
- Interrupt reporting if it takes more than this many seconds.
750
- """
751
- ...
752
-
753
- @typing.overload
754
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
755
- """
756
- Internal decorator to support Fast bakery
757
- """
758
- ...
759
-
760
- @typing.overload
761
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
762
- ...
763
-
764
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
765
- """
766
- Internal decorator to support Fast bakery
767
- """
768
- ...
769
-
770
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
771
- """
772
- Specifies that this step is used to deploy an instance of the app.
773
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
782
+ @step
783
+ def train(self):
784
+ # Assume that the task has restarted and the previous attempt of the task
785
+ # saved a checkpoint
786
+ checkpoint_path = None
787
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
788
+ print("Loaded checkpoint from the previous attempt")
789
+ checkpoint_path = current.checkpoint.directory
790
+
791
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
792
+ for i in range(self.epochs):
793
+ ...
794
+ ```
774
795
 
775
796
 
776
797
  Parameters
777
798
  ----------
778
- app_port : int
779
- Port on which the deployed app listens.
780
- app_name : str
781
- Name of the app to deploy.
799
+ load_policy : str, default: "fresh"
800
+ The policy for loading the checkpoint. The following policies are supported:
801
+ - "eager": Loads the the latest available checkpoint within the namespace.
802
+ With this mode, the latest checkpoint written by any previous task of the step (possibly from a different run)
803
+ will be loaded at the start of the task.
804
+ - "none": Do not load any checkpoint
805
+ - "fresh": Loads the lastest checkpoint created within the running Task.
806
+ This mode helps load checkpoints across retry attempts of the same task.
807
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
808
+ created within the task will be loaded when the task retries execution after a failure.
809
+
810
+ temp_dir_root : str, default: None
811
+ The root directory under which `current.checkpoint.directory` will be created.
782
812
  """
783
813
  ...
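To complement the loading example in the docstring above, a sketch of the saving side, assuming `@checkpoint` is importable from `metaflow` (as these stubs declare) and that `current.checkpoint.save()` persists the contents of `current.checkpoint.directory`; the file name and loop are placeholders.

```python
import os

from metaflow import FlowSpec, checkpoint, current, step


class CheckpointDemoFlow(FlowSpec):

    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        for epoch in range(3):
            # Write the latest state into the checkpoint directory, then persist it
            # so a retried attempt of this task can resume from it.
            state_file = os.path.join(current.checkpoint.directory, "state.txt")
            with open(state_file, "w") as f:
                f.write(f"epoch={epoch}")
            current.checkpoint.save()  # assumed to upload the checkpoint directory
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CheckpointDemoFlow()
```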
784
814
 
@@ -871,39 +901,6 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
871
901
  """
872
902
  ...
873
903
 
874
- @typing.overload
875
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
876
- """
877
- Specifies environment variables to be set prior to the execution of a step.
878
-
879
-
880
- Parameters
881
- ----------
882
- vars : Dict[str, str], default {}
883
- Dictionary of environment variables to set.
884
- """
885
- ...
886
-
887
- @typing.overload
888
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
889
- ...
890
-
891
- @typing.overload
892
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
893
- ...
894
-
895
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
896
- """
897
- Specifies environment variables to be set prior to the execution of a step.
898
-
899
-
900
- Parameters
901
- ----------
902
- vars : Dict[str, str], default {}
903
- Dictionary of environment variables to set.
904
- """
905
- ...
906
-
907
904
  @typing.overload
908
905
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
909
906
  """
@@ -939,6 +936,21 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
939
936
  """
940
937
  ...
941
938
 
939
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
940
+ """
941
+ Specifies that this step is used to deploy an instance of the app.
942
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
943
+
944
+
945
+ Parameters
946
+ ----------
947
+ app_port : int
948
+ Port on which the deployed app listens.
949
+ app_name : str
950
+ Name of the app to deploy.
951
+ """
952
+ ...
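A sketch of how `@app_deploy` might be attached to a step, assuming it is importable from `metaflow` as declared in these stubs; the entrypoint and deploy directory values are placeholders, and the required attributes follow the docstring above.

```python
from metaflow import FlowSpec, app_deploy, step


class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.deploy)

    # The docstring notes that self.app_name, self.app_port,
    # self.entrypoint and self.deployDir must be set by the step.
    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def deploy(self):
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "python app.py"  # placeholder entrypoint
        self.deployDir = "./app"           # placeholder deployment directory
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    AppDeployFlow()
```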
953
+
942
954
  @typing.overload
943
955
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
944
956
  """
@@ -1018,230 +1030,363 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1018
1030
  """
1019
1031
  ...
1020
1032
 
1021
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1033
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1022
1034
  """
1023
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1035
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1024
1036
 
1025
1037
  User code call
1026
1038
  --------------
1027
- @vllm(
1028
- model="...",
1039
+ @ollama(
1040
+ models=[...],
1029
1041
  ...
1030
1042
  )
1031
1043
 
1032
1044
  Valid backend options
1033
1045
  ---------------------
1034
1046
  - 'local': Run as a separate process on the local task machine.
1047
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1048
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1035
1049
 
1036
1050
  Valid model options
1037
1051
  -------------------
1038
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1039
-
1040
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1041
- If you need multiple models, you must create multiple @vllm decorators.
1052
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1042
1053
 
1043
1054
 
1044
1055
  Parameters
1045
1056
  ----------
1046
- model: str
1047
- HuggingFace model identifier to be served by vLLM.
1057
+ models: list[str]
1058
+ List of Ollama containers running models in sidecars.
1048
1059
  backend: str
1049
- Determines where and how to run the vLLM process.
1050
- openai_api_server: bool
1051
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1052
- Default is False (uses native engine).
1053
- Set to True for backward compatibility with existing code.
1060
+ Determines where and how to run the Ollama process.
1061
+ force_pull: bool
1062
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1063
+ cache_update_policy: str
1064
+ Cache update policy: "auto", "force", or "never".
1065
+ force_cache_update: bool
1066
+ Simple override for "force" cache update policy.
1054
1067
  debug: bool
1055
1068
  Whether to turn on verbose debugging logs.
1056
- card_refresh_interval: int
1057
- Interval in seconds for refreshing the vLLM status card.
1058
- Only used when openai_api_server=True.
1059
- max_retries: int
1060
- Maximum number of retries checking for vLLM server startup.
1061
- Only used when openai_api_server=True.
1062
- retry_alert_frequency: int
1063
- Frequency of alert logs for vLLM server startup retries.
1064
- Only used when openai_api_server=True.
1065
- engine_args : dict
1066
- Additional keyword arguments to pass to the vLLM engine.
1067
- For example, `tensor_parallel_size=2`.
1069
+ circuit_breaker_config: dict
1070
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1071
+ timeout_config: dict
1072
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1073
+ """
1074
+ ...
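A sketch of using `@ollama` with the local backend, assuming the sidecar exposes the standard Ollama REST endpoint on `localhost:11434` and that the parameters not shown fall back to their runtime defaults; the model name and prompt are illustrative.

```python
import json
import urllib.request

from metaflow import FlowSpec, ollama, step


class OllamaDemoFlow(FlowSpec):

    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # Assumes the sidecar serves the standard Ollama API locally.
        payload = json.dumps(
            {"model": "llama3.2", "prompt": "Say hello.", "stream": False}
        ).encode()
        req = urllib.request.Request(
            "http://localhost:11434/api/generate",
            data=payload,
            headers={"Content-Type": "application/json"},
        )
        with urllib.request.urlopen(req) as resp:
            print(json.load(resp)["response"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```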
1075
+
1076
+ @typing.overload
1077
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1078
+ """
1079
+ Specifies a timeout for your step.
1080
+
1081
+ This decorator is useful if this step may hang indefinitely.
1082
+
1083
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1084
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1085
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1086
+
1087
+ Note that all the values specified in parameters are added together so if you specify
1088
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1089
+
1090
+
1091
+ Parameters
1092
+ ----------
1093
+ seconds : int, default 0
1094
+ Number of seconds to wait prior to timing out.
1095
+ minutes : int, default 0
1096
+ Number of minutes to wait prior to timing out.
1097
+ hours : int, default 0
1098
+ Number of hours to wait prior to timing out.
1099
+ """
1100
+ ...
1101
+
1102
+ @typing.overload
1103
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1104
+ ...
1105
+
1106
+ @typing.overload
1107
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1108
+ ...
1109
+
1110
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1111
+ """
1112
+ Specifies a timeout for your step.
1113
+
1114
+ This decorator is useful if this step may hang indefinitely.
1115
+
1116
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1117
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1118
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1119
+
1120
+ Note that all the values specified in parameters are added together so if you specify
1121
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1122
+
1123
+
1124
+ Parameters
1125
+ ----------
1126
+ seconds : int, default 0
1127
+ Number of seconds to wait prior to timing out.
1128
+ minutes : int, default 0
1129
+ Number of minutes to wait prior to timing out.
1130
+ hours : int, default 0
1131
+ Number of hours to wait prior to timing out.
1132
+ """
1133
+ ...
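A usage sketch combining `@timeout` with `@retry`, as the docstring suggests; the placeholder function stands in for work that may hang.

```python
from metaflow import FlowSpec, retry, step, timeout


def run_long_computation():
    # Placeholder so the example is self-contained.
    pass


class TimeoutDemoFlow(FlowSpec):

    # Give the step at most 1 hour and 30 minutes, and retry once if it times out.
    @retry(times=1)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        run_long_computation()
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```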
1134
+
1135
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1136
+ """
1137
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
1138
+
1139
+ > Examples
1140
+
1141
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
1142
+ ```python
1143
+ @huggingface_hub
1144
+ @step
1145
+ def pull_model_from_huggingface(self):
1146
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1147
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1148
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1149
+ # value of the function is a reference to the model in the backend storage.
1150
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1151
+
1152
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1153
+ self.llama_model = current.huggingface_hub.snapshot_download(
1154
+ repo_id=self.model_id,
1155
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
1156
+ )
1157
+ self.next(self.train)
1158
+ ```
1159
+
1160
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
1161
+ ```python
1162
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
1163
+ @step
1164
+ def pull_model_from_huggingface(self):
1165
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1166
+ ```
1167
+
1168
+ ```python
1169
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
1170
+ @step
1171
+ def finetune_model(self):
1172
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1173
+ # path_to_model will be /my-directory
1174
+ ```
1175
+
1176
+ ```python
1177
+ # Takes all the arguments passed to `snapshot_download`
1178
+ # except for `local_dir`
1179
+ @huggingface_hub(load=[
1180
+ {
1181
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
1182
+ },
1183
+ {
1184
+ "repo_id": "myorg/mistral-lora",
1185
+ "repo_type": "model",
1186
+ },
1187
+ ])
1188
+ @step
1189
+ def finetune_model(self):
1190
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1191
+ # path_to_model will be a temporary directory managed by the decorator
1192
+ ```
1193
+
1194
+
1195
+ Parameters
1196
+ ----------
1197
+ temp_dir_root : str, optional
1198
+ The root directory that will hold the temporary directory where objects will be downloaded.
1199
+
1200
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1201
+ The list of repos (models/datasets) to load.
1202
+
1203
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1204
+
1205
+ - If repo (model/dataset) is not found in the datastore:
1206
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1207
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1208
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1209
+
1210
+ - If repo is found in the datastore:
1211
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1068
1212
  """
1069
1213
  ...
1070
1214
 
1071
1215
  @typing.overload
1072
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1216
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1073
1217
  """
1074
- Specifies the PyPI packages for the step.
1075
-
1076
- Information in this decorator will augment any
1077
- attributes set in the `@pypi_base` flow-level decorator. Hence,
1078
- you can use `@pypi_base` to set packages required by all
1079
- steps and use `@pypi` to specify step-specific overrides.
1218
+ Specifies environment variables to be set prior to the execution of a step.
1080
1219
 
1081
1220
 
1082
1221
  Parameters
1083
1222
  ----------
1084
- packages : Dict[str, str], default: {}
1085
- Packages to use for this step. The key is the name of the package
1086
- and the value is the version to use.
1087
- python : str, optional, default: None
1088
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1089
- that the version used will correspond to the version of the Python interpreter used to start the run.
1223
+ vars : Dict[str, str], default {}
1224
+ Dictionary of environment variables to set.
1090
1225
  """
1091
1226
  ...
1092
1227
 
1093
1228
  @typing.overload
1094
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1229
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1095
1230
  ...
1096
1231
 
1097
1232
  @typing.overload
1098
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1233
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1099
1234
  ...
1100
1235
 
1101
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1236
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1102
1237
  """
1103
- Specifies the PyPI packages for the step.
1104
-
1105
- Information in this decorator will augment any
1106
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1107
- you can use `@pypi_base` to set packages required by all
1108
- steps and use `@pypi` to specify step-specific overrides.
1238
+ Specifies environment variables to be set prior to the execution of a step.
1109
1239
 
1110
1240
 
1111
1241
  Parameters
1112
1242
  ----------
1113
- packages : Dict[str, str], default: {}
1114
- Packages to use for this step. The key is the name of the package
1115
- and the value is the version to use.
1116
- python : str, optional, default: None
1117
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1118
- that the version used will correspond to the version of the Python interpreter used to start the run.
1243
+ vars : Dict[str, str], default {}
1244
+ Dictionary of environment variables to set.
1119
1245
  """
1120
1246
  ...
1121
1247
 
1122
1248
  @typing.overload
1123
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1249
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1124
1250
  """
1125
- Specifies that the step will succeed under all circumstances.
1126
-
1127
- The decorator will create an optional artifact, specified by `var`, which
1128
- contains the exception raised. You can use it to detect the presence
1129
- of errors, indicating that all happy-path artifacts produced by the step
1130
- are missing.
1251
+ Specifies the times when the flow should be run when running on a
1252
+ production scheduler.
1131
1253
 
1132
1254
 
1133
1255
  Parameters
1134
1256
  ----------
1135
- var : str, optional, default None
1136
- Name of the artifact in which to store the caught exception.
1137
- If not specified, the exception is not stored.
1138
- print_exception : bool, default True
1139
- Determines whether or not the exception is printed to
1140
- stdout when caught.
1257
+ hourly : bool, default False
1258
+ Run the workflow hourly.
1259
+ daily : bool, default True
1260
+ Run the workflow daily.
1261
+ weekly : bool, default False
1262
+ Run the workflow weekly.
1263
+ cron : str, optional, default None
1264
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1265
+ specified by this expression.
1266
+ timezone : str, optional, default None
1267
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1268
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1141
1269
  """
1142
1270
  ...
1143
1271
 
1144
1272
  @typing.overload
1145
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1146
- ...
1147
-
1148
- @typing.overload
1149
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1273
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1150
1274
  ...
1151
1275
 
1152
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1276
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1153
1277
  """
1154
- Specifies that the step will succeed under all circumstances.
1155
-
1156
- The decorator will create an optional artifact, specified by `var`, which
1157
- contains the exception raised. You can use it to detect the presence
1158
- of errors, indicating that all happy-path artifacts produced by the step
1159
- are missing.
1278
+ Specifies the times when the flow should be run when running on a
1279
+ production scheduler.
1160
1280
 
1161
1281
 
1162
1282
  Parameters
1163
1283
  ----------
1164
- var : str, optional, default None
1165
- Name of the artifact in which to store the caught exception.
1166
- If not specified, the exception is not stored.
1167
- print_exception : bool, default True
1168
- Determines whether or not the exception is printed to
1169
- stdout when caught.
1284
+ hourly : bool, default False
1285
+ Run the workflow hourly.
1286
+ daily : bool, default True
1287
+ Run the workflow daily.
1288
+ weekly : bool, default False
1289
+ Run the workflow weekly.
1290
+ cron : str, optional, default None
1291
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1292
+ specified by this expression.
1293
+ timezone : str, optional, default None
1294
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1295
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1170
1296
  """
1171
1297
  ...
1172
1298
 
1173
- @typing.overload
1174
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1299
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1175
1300
  """
1176
- Specifies a timeout for your step.
1177
-
1178
- This decorator is useful if this step may hang indefinitely.
1179
-
1180
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1181
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1182
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1183
-
1184
- Note that all the values specified in parameters are added together so if you specify
1185
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1301
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1302
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1303
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1304
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
1305
+ starts only after all sensors finish.
1186
1306
 
1187
1307
 
1188
1308
  Parameters
1189
1309
  ----------
1190
- seconds : int, default 0
1191
- Number of seconds to wait prior to timing out.
1192
- minutes : int, default 0
1193
- Number of minutes to wait prior to timing out.
1194
- hours : int, default 0
1195
- Number of hours to wait prior to timing out.
1310
+ timeout : int
1311
+ Time, in seconds before the task times out and fails. (Default: 3600)
1312
+ poke_interval : int
1313
+ Time in seconds that the job should wait in between each try. (Default: 60)
1314
+ mode : str
1315
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1316
+ exponential_backoff : bool
1317
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1318
+ pool : str
1319
+ the slot pool this task should run in,
1320
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1321
+ soft_fail : bool
1322
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1323
+ name : str
1324
+ Name of the sensor on Airflow
1325
+ description : str
1326
+ Description of sensor in the Airflow UI
1327
+ bucket_key : Union[str, List[str]]
1328
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1329
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1330
+ bucket_name : str
1331
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1332
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1333
+ wildcard_match : bool
1334
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1335
+ aws_conn_id : str
1336
+ a reference to the s3 connection on Airflow. (Default: None)
1337
+ verify : bool
1338
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1196
1339
  """
1197
1340
  ...
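A usage sketch of `@airflow_s3_key_sensor`, assuming the parameters not shown fall back to the defaults listed in the docstring; the bucket and key are placeholders, and the sensor only takes effect when the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# Block the `start` step until the key exists; only relevant when this flow
# is compiled for Airflow with `python flow.py airflow create`.
@airflow_s3_key_sensor(
    name="wait_for_daily_dump",
    bucket_key="s3://my-bucket/exports/daily.parquet",  # placeholder bucket/key
)
class SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorDemoFlow()
```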
1198
1341
 
1199
1342
  @typing.overload
1200
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1201
- ...
1202
-
1203
- @typing.overload
1204
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1205
- ...
1206
-
1207
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1343
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1208
1344
  """
1209
- Specifies a timeout for your step.
1210
-
1211
- This decorator is useful if this step may hang indefinitely.
1212
-
1213
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1214
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1215
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1345
+ Specifies the Conda environment for all steps of the flow.
1216
1346
 
1217
- Note that all the values specified in parameters are added together so if you specify
1218
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1347
+ Use `@conda_base` to set common libraries required by all
1348
+ steps and use `@conda` to specify step-specific additions.
1219
1349
 
1220
1350
 
1221
1351
  Parameters
1222
1352
  ----------
1223
- seconds : int, default 0
1224
- Number of seconds to wait prior to timing out.
1225
- minutes : int, default 0
1226
- Number of minutes to wait prior to timing out.
1227
- hours : int, default 0
1228
- Number of hours to wait prior to timing out.
1353
+ packages : Dict[str, str], default {}
1354
+ Packages to use for this flow. The key is the name of the package
1355
+ and the value is the version to use.
1356
+ libraries : Dict[str, str], default {}
1357
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1358
+ python : str, optional, default None
1359
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1360
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1361
+ disabled : bool, default False
1362
+ If set to True, disables Conda.
1229
1363
  """
1230
1364
  ...
1231
1365
 
1232
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1366
+ @typing.overload
1367
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1368
+ ...
1369
+
1370
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1233
1371
  """
1234
- Specifies that this step should execute on DGX cloud.
1372
+ Specifies the Conda environment for all steps of the flow.
1373
+
1374
+ Use `@conda_base` to set common libraries required by all
1375
+ steps and use `@conda` to specify step-specific additions.
1235
1376
 
1236
1377
 
1237
1378
  Parameters
1238
1379
  ----------
1239
- gpu : int
1240
- Number of GPUs to use.
1241
- gpu_type : str
1242
- Type of Nvidia GPU to use.
1243
- queue_timeout : int
1244
- Time to keep the job in NVCF's queue.
1380
+ packages : Dict[str, str], default {}
1381
+ Packages to use for this flow. The key is the name of the package
1382
+ and the value is the version to use.
1383
+ libraries : Dict[str, str], default {}
1384
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1385
+ python : str, optional, default None
1386
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1387
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1388
+ disabled : bool, default False
1389
+ If set to True, disables Conda.
1245
1390
  """
1246
1391
  ...
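A usage sketch of `@conda_base`; the Python and package version pins are illustrative.

```python
from metaflow import FlowSpec, conda_base, step


# Pin a common interpreter and libraries for every step of the flow;
# individual steps can still add packages with @conda.
@conda_base(python="3.10.12", packages={"pandas": "2.1.4", "scikit-learn": "1.4.0"})
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```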
1247
1392
 
@@ -1337,12 +1482,53 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1337
1482
 
1338
1483
  Parameters
1339
1484
  ----------
1340
- flow : Union[str, Dict[str, str]], optional, default None
1341
- Upstream flow dependency for this flow.
1342
- flows : List[Union[str, Dict[str, str]]], default []
1343
- Upstream flow dependencies for this flow.
1344
- options : Dict[str, Any], default {}
1345
- Backend-specific configuration for tuning eventing behavior.
1485
+ flow : Union[str, Dict[str, str]], optional, default None
1486
+ Upstream flow dependency for this flow.
1487
+ flows : List[Union[str, Dict[str, str]]], default []
1488
+ Upstream flow dependencies for this flow.
1489
+ options : Dict[str, Any], default {}
1490
+ Backend-specific configuration for tuning eventing behavior.
1491
+ """
1492
+ ...
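A usage sketch of `@trigger_on_finish`; `TrainingFlow` is a placeholder for the upstream flow name.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Start this flow automatically whenever a deployed run of TrainingFlow finishes.
@trigger_on_finish(flow="TrainingFlow")
class EvaluationFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EvaluationFlow()
```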
1493
+
1494
+ @typing.overload
1495
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1496
+ """
1497
+ Specifies the PyPI packages for all steps of the flow.
1498
+
1499
+ Use `@pypi_base` to set common packages required by all
1500
+ steps and use `@pypi` to specify step-specific overrides.
1501
+
1502
+ Parameters
1503
+ ----------
1504
+ packages : Dict[str, str], default: {}
1505
+ Packages to use for this flow. The key is the name of the package
1506
+ and the value is the version to use.
1507
+ python : str, optional, default: None
1508
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1509
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1510
+ """
1511
+ ...
1512
+
1513
+ @typing.overload
1514
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1515
+ ...
1516
+
1517
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1518
+ """
1519
+ Specifies the PyPI packages for all steps of the flow.
1520
+
1521
+ Use `@pypi_base` to set common packages required by all
1522
+ steps and use `@pypi` to specify step-specific overrides.
1523
+
1524
+ Parameters
1525
+ ----------
1526
+ packages : Dict[str, str], default: {}
1527
+ Packages to use for this flow. The key is the name of the package
1528
+ and the value is the version to use.
1529
+ python : str, optional, default: None
1530
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1531
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1346
1532
  """
1347
1533
  ...
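A usage sketch of `@pypi_base`; the Python and package version pins are illustrative.

```python
from metaflow import FlowSpec, pypi_base, step


# Make the same PyPI packages available to every step; @pypi on a step can override them.
@pypi_base(python="3.10.12", packages={"requests": "2.31.0"})
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import requests
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```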
1348
1534
 
@@ -1439,47 +1625,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1439
1625
  """
1440
1626
  ...
1441
1627
 
1442
- @typing.overload
1443
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1444
- """
1445
- Specifies the PyPI packages for all steps of the flow.
1446
-
1447
- Use `@pypi_base` to set common packages required by all
1448
- steps and use `@pypi` to specify step-specific overrides.
1449
-
1450
- Parameters
1451
- ----------
1452
- packages : Dict[str, str], default: {}
1453
- Packages to use for this flow. The key is the name of the package
1454
- and the value is the version to use.
1455
- python : str, optional, default: None
1456
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1457
- that the version used will correspond to the version of the Python interpreter used to start the run.
1458
- """
1459
- ...
1460
-
1461
- @typing.overload
1462
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1463
- ...
1464
-
1465
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1466
- """
1467
- Specifies the PyPI packages for all steps of the flow.
1468
-
1469
- Use `@pypi_base` to set common packages required by all
1470
- steps and use `@pypi` to specify step-specific overrides.
1471
-
1472
- Parameters
1473
- ----------
1474
- packages : Dict[str, str], default: {}
1475
- Packages to use for this flow. The key is the name of the package
1476
- and the value is the version to use.
1477
- python : str, optional, default: None
1478
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1479
- that the version used will correspond to the version of the Python interpreter used to start the run.
1480
- """
1481
- ...
1482
-
1483
1628
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1484
1629
  """
1485
1630
  Allows setting external datastores to save data for the
@@ -1594,92 +1739,6 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1594
1739
  """
1595
1740
  ...
1596
1741
 
1597
- @typing.overload
1598
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1599
- """
1600
- Specifies the times when the flow should be run when running on a
1601
- production scheduler.
1602
-
1603
-
1604
- Parameters
1605
- ----------
1606
- hourly : bool, default False
1607
- Run the workflow hourly.
1608
- daily : bool, default True
1609
- Run the workflow daily.
1610
- weekly : bool, default False
1611
- Run the workflow weekly.
1612
- cron : str, optional, default None
1613
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1614
- specified by this expression.
1615
- timezone : str, optional, default None
1616
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1617
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1618
- """
1619
- ...
1620
-
1621
- @typing.overload
1622
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1623
- ...
1624
-
1625
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1626
- """
1627
- Specifies the times when the flow should be run when running on a
1628
- production scheduler.
1629
-
1630
-
1631
- Parameters
1632
- ----------
1633
- hourly : bool, default False
1634
- Run the workflow hourly.
1635
- daily : bool, default True
1636
- Run the workflow daily.
1637
- weekly : bool, default False
1638
- Run the workflow weekly.
1639
- cron : str, optional, default None
1640
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1641
- specified by this expression.
1642
- timezone : str, optional, default None
1643
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1644
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1645
- """
1646
- ...
1647
-
1648
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1649
- """
1650
- Specifies what flows belong to the same project.
1651
-
1652
- A project-specific namespace is created for all flows that
1653
- use the same `@project(name)`.
1654
-
1655
-
1656
- Parameters
1657
- ----------
1658
- name : str
1659
- Project name. Make sure that the name is unique amongst all
1660
- projects that use the same production scheduler. The name may
1661
- contain only lowercase alphanumeric characters and underscores.
1662
-
1663
- branch : Optional[str], default None
1664
- The branch to use. If not specified, the branch is set to
1665
- `user.<username>` unless `production` is set to `True`. This can
1666
- also be set on the command line using `--branch` as a top-level option.
1667
- It is an error to specify `branch` in the decorator and on the command line.
1668
-
1669
- production : bool, default False
1670
- Whether or not the branch is the production branch. This can also be set on the
1671
- command line using `--production` as a top-level option. It is an error to specify
1672
- `production` in the decorator and on the command line.
1673
- The project branch name will be:
1674
- - if `branch` is specified:
1675
- - if `production` is True: `prod.<branch>`
1676
- - if `production` is False: `test.<branch>`
1677
- - if `branch` is not specified:
1678
- - if `production` is True: `prod`
1679
- - if `production` is False: `user.<username>`
1680
- """
1681
- ...
1682
-
1683
1742
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1684
1743
  """
1685
1744
  The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1723,97 +1782,38 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1723
1782
  """
1724
1783
  ...
1725
1784
 
1726
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1727
- """
1728
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1729
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1730
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1731
- added as a flow decorator. Adding more than one decorator will ensure that the `start` step
1732
- starts only after all sensors finish.
1733
-
1734
-
1735
- Parameters
1736
- ----------
1737
- timeout : int
1738
- Time, in seconds before the task times out and fails. (Default: 3600)
1739
- poke_interval : int
1740
- Time in seconds that the job should wait in between each try. (Default: 60)
1741
- mode : str
1742
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1743
- exponential_backoff : bool
1744
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1745
- pool : str
1746
- the slot pool this task should run in,
1747
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
1748
- soft_fail : bool
1749
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1750
- name : str
1751
- Name of the sensor on Airflow
1752
- description : str
1753
- Description of sensor in the Airflow UI
1754
- bucket_key : Union[str, List[str]]
1755
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1756
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1757
- bucket_name : str
1758
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1759
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1760
- wildcard_match : bool
1761
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1762
- aws_conn_id : str
1763
- a reference to the s3 connection on Airflow. (Default: None)
1764
- verify : bool
1765
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1766
- """
1767
- ...
1768
-
1769
- @typing.overload
1770
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1785
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1771
1786
  """
1772
- Specifies the Conda environment for all steps of the flow.
1787
+ Specifies what flows belong to the same project.
1773
1788
 
1774
- Use `@conda_base` to set common libraries required by all
1775
- steps and use `@conda` to specify step-specific additions.
1789
+ A project-specific namespace is created for all flows that
1790
+ use the same `@project(name)`.
1776
1791
 
1777
1792
 
1778
1793
  Parameters
1779
1794
  ----------
1780
- packages : Dict[str, str], default {}
1781
- Packages to use for this flow. The key is the name of the package
1782
- and the value is the version to use.
1783
- libraries : Dict[str, str], default {}
1784
- Supported for backward compatibility. When used with packages, packages will take precedence.
1785
- python : str, optional, default None
1786
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1787
- that the version used will correspond to the version of the Python interpreter used to start the run.
1788
- disabled : bool, default False
1789
- If set to True, disables Conda.
1790
- """
1791
- ...
1792
-
1793
- @typing.overload
1794
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1795
- ...
1796
-
1797
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1798
- """
1799
- Specifies the Conda environment for all steps of the flow.
1800
-
1801
- Use `@conda_base` to set common libraries required by all
1802
- steps and use `@conda` to specify step-specific additions.
1795
+ name : str
1796
+ Project name. Make sure that the name is unique amongst all
1797
+ projects that use the same production scheduler. The name may
1798
+ contain only lowercase alphanumeric characters and underscores.
1803
1799
 
1800
+ branch : Optional[str], default None
1801
+ The branch to use. If not specified, the branch is set to
1802
+ `user.<username>` unless `production` is set to `True`. This can
1803
+ also be set on the command line using `--branch` as a top-level option.
1804
+ It is an error to specify `branch` in the decorator and on the command line.
1804
1805
 
1805
- Parameters
1806
- ----------
1807
- packages : Dict[str, str], default {}
1808
- Packages to use for this flow. The key is the name of the package
1809
- and the value is the version to use.
1810
- libraries : Dict[str, str], default {}
1811
- Supported for backward compatibility. When used with packages, packages will take precedence.
1812
- python : str, optional, default None
1813
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1814
- that the version used will correspond to the version of the Python interpreter used to start the run.
1815
- disabled : bool, default False
1816
- If set to True, disables Conda.
1806
+ production : bool, default False
1807
+ Whether or not the branch is the production branch. This can also be set on the
1808
+ command line using `--production` as a top-level option. It is an error to specify
1809
+ `production` in the decorator and on the command line.
1810
+ The project branch name will be:
1811
+ - if `branch` is specified:
1812
+ - if `production` is True: `prod.<branch>`
1813
+ - if `production` is False: `test.<branch>`
1814
+ - if `branch` is not specified:
1815
+ - if `production` is True: `prod`
1816
+ - if `production` is False: `user.<username>`
1817
1817
  """
1818
1818
  ...
1819
1819
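A usage sketch of `@project`; the project name is a placeholder.

```python
from metaflow import FlowSpec, project, step


# All flows deployed with the same project name share a namespace; branch and
# production can also be set from the command line (--branch / --production).
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```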