ob-metaflow-stubs 6.0.4.2__py2.py3-none-any.whl → 6.0.4.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (260)
  1. metaflow-stubs/__init__.pyi +770 -770
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +7 -7
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +51 -51
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +5 -5
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +4 -4
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +5 -5
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +4 -4
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/multicore_utils.pyi +2 -2
  116. metaflow-stubs/ob_internal.pyi +2 -2
  117. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  118. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  119. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  120. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  121. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  122. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  123. metaflow-stubs/parameters.pyi +3 -3
  124. metaflow-stubs/plugins/__init__.pyi +11 -11
  125. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  126. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  133. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  136. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  137. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  139. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  140. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  141. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  143. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  147. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  149. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  156. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  157. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  160. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  161. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  162. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  163. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  164. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  168. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  172. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  176. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  177. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  178. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  179. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  183. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  185. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  186. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  187. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  188. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  191. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  193. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  195. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  199. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  201. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  206. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  207. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  208. metaflow-stubs/plugins/perimeters.pyi +2 -2
  209. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  211. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  213. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  215. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  217. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  218. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  219. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  220. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  221. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  225. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  227. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  228. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  229. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  230. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  231. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  232. metaflow-stubs/profilers/__init__.pyi +2 -2
  233. metaflow-stubs/pylint_wrapper.pyi +2 -2
  234. metaflow-stubs/runner/__init__.pyi +2 -2
  235. metaflow-stubs/runner/deployer.pyi +30 -30
  236. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  237. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  238. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  239. metaflow-stubs/runner/nbrun.pyi +2 -2
  240. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  241. metaflow-stubs/runner/utils.pyi +3 -3
  242. metaflow-stubs/system/__init__.pyi +2 -2
  243. metaflow-stubs/system/system_logger.pyi +2 -2
  244. metaflow-stubs/system/system_monitor.pyi +2 -2
  245. metaflow-stubs/tagging_util.pyi +2 -2
  246. metaflow-stubs/tuple_util.pyi +2 -2
  247. metaflow-stubs/user_configs/__init__.pyi +2 -2
  248. metaflow-stubs/user_configs/config_options.pyi +3 -3
  249. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  250. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  251. metaflow-stubs/user_decorators/common.pyi +2 -2
  252. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  253. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  254. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  255. metaflow-stubs/user_decorators/user_step_decorator.pyi +7 -7
  256. {ob_metaflow_stubs-6.0.4.2.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/METADATA +1 -1
  257. ob_metaflow_stubs-6.0.4.4.dist-info/RECORD +260 -0
  258. ob_metaflow_stubs-6.0.4.2.dist-info/RECORD +0 -260
  259. {ob_metaflow_stubs-6.0.4.2.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/WHEEL +0 -0
  260. {ob_metaflow_stubs-6.0.4.2.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.16.0.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-15T03:12:46.861592 #
+ # MF version: 2.16.2.1+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-07-16T08:15:48.145976 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -48,9 +48,9 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -162,6 +162,42 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -222,81 +258,131 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Enables loading / saving of models within a step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
  ```
- python myflow.py run --with kubernetes
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Specifies the resources needed when executing this step.
+ Enables loading / saving of models within a step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
  ```
- python myflow.py run --with kubernetes
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

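The `@model` docstring introduced above already demonstrates `current.model.save` and `load="my_model"`; as a supplementary sketch of the documented tuple form of `load` and of `temp_dir_root` (file names, labels, and paths here are illustrative, and whether `save` accepts a plain directory path exactly as shown is an assumption, not something the stub spells out):

```python
import os

from metaflow import FlowSpec, step, model, current


class ModelArtifactSketchFlow(FlowSpec):
    @model
    @step
    def start(self):
        # write a toy "model" directory and register it under the label "my_model"
        os.makedirs("model_dir", exist_ok=True)
        with open(os.path.join("model_dir", "weights.bin"), "wb") as f:
            f.write(b"\x00" * 16)
        self.my_model = current.model.save("model_dir", label="my_model")
        self.next(self.evaluate)

    # per the docstring: a (artifact_name, path) tuple unpacks the artifact at that path,
    # and temp_dir_root controls where current.model.loaded stages loaded models
    @model(load=[("my_model", "./unpacked_model")], temp_dir_root="/tmp/mf_models")
    @step
    def evaluate(self):
        print(os.listdir(current.model.loaded["my_model"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelArtifactSketchFlow()
```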
@@ -447,7 +533,7 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.

@@ -458,162 +544,63 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
  Number of GPUs to use.
  gpu_type : str
  Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
+ Specifies that this step should execute on Kubernetes.


  Parameters
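The `@pypi` and `@kubernetes` stubs above list their accepted parameters; a minimal sketch combining the two as this stub file's top-level exports suggest (the package pin, Python version, and resource numbers are placeholders, not values taken from this diff):

```python
from metaflow import FlowSpec, step, kubernetes, pypi


class K8sPypiSketchFlow(FlowSpec):
    # parameter names follow the signatures shown above; the values are illustrative
    @kubernetes(cpu=2, memory=8192, disk=20480)
    @pypi(packages={"pandas": "2.2.2"}, python="3.11")
    @step
    def start(self):
        import pandas as pd  # installed into the step's PyPI environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sPypiSketchFlow()
```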
@@ -719,212 +706,424 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
719
706
  """
720
707
  ...
721
708
 
722
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
709
+ @typing.overload
710
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
723
711
  """
724
- Decorator that helps cache, version and store models/datasets from huggingface hub.
725
-
726
- > Examples
727
-
728
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
729
- ```python
730
- @huggingface_hub
731
- @step
732
- def pull_model_from_huggingface(self):
733
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
734
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
735
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
736
- # value of the function is a reference to the model in the backend storage.
737
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
738
-
739
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
740
- self.llama_model = current.huggingface_hub.snapshot_download(
741
- repo_id=self.model_id,
742
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
743
- )
744
- self.next(self.train)
745
- ```
746
-
747
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
748
- ```python
749
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
750
- @step
751
- def pull_model_from_huggingface(self):
752
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
753
- ```
712
+ Specifies the number of times the task corresponding
713
+ to a step needs to be retried.
754
714
 
755
- ```python
756
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
757
- @step
758
- def finetune_model(self):
759
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
760
- # path_to_model will be /my-directory
761
- ```
715
+ This decorator is useful for handling transient errors, such as networking issues.
716
+ If your task contains operations that can't be retried safely, e.g. database updates,
717
+ it is advisable to annotate it with `@retry(times=0)`.
762
718
 
763
- ```python
764
- # Takes all the arguments passed to `snapshot_download`
765
- # except for `local_dir`
766
- @huggingface_hub(load=[
767
- {
768
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
769
- },
770
- {
771
- "repo_id": "myorg/mistral-lora",
772
- "repo_type": "model",
773
- },
774
- ])
775
- @step
776
- def finetune_model(self):
777
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
778
- # path_to_model will be /my-directory
779
- ```
719
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
720
+ decorator will execute a no-op task after all retries have been exhausted,
721
+ ensuring that the flow execution can continue.
780
722
 
781
723
 
782
724
  Parameters
783
725
  ----------
784
- temp_dir_root : str, optional
785
- The root directory that will hold the temporary directory where objects will be downloaded.
726
+ times : int, default 3
727
+ Number of times to retry this task.
728
+ minutes_between_retries : int, default 2
729
+ Number of minutes between retries.
730
+ """
731
+ ...
732
+
733
+ @typing.overload
734
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
735
+ ...
736
+
737
+ @typing.overload
738
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
739
+ ...
740
+
741
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
742
+ """
743
+ Specifies the number of times the task corresponding
744
+ to a step needs to be retried.
786
745
 
787
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
788
- The list of repos (models/datasets) to load.
746
+ This decorator is useful for handling transient errors, such as networking issues.
747
+ If your task contains operations that can't be retried safely, e.g. database updates,
748
+ it is advisable to annotate it with `@retry(times=0)`.
789
749
 
790
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
750
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
751
+ decorator will execute a no-op task after all retries have been exhausted,
752
+ ensuring that the flow execution can continue.
791
753
 
792
- - If repo (model/dataset) is not found in the datastore:
793
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
794
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
795
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
796
754
 
797
- - If repo is found in the datastore:
798
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
755
+ Parameters
756
+ ----------
757
+ times : int, default 3
758
+ Number of times to retry this task.
759
+ minutes_between_retries : int, default 2
760
+ Number of minutes between retries.
799
761
  """
800
762
  ...
801
763
 
802
764
  @typing.overload
803
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
765
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
804
766
  """
805
- Enables loading / saving of models within a step.
806
-
807
- > Examples
808
- - Saving Models
809
- ```python
810
- @model
811
- @step
812
- def train(self):
813
- # current.model.save returns a dictionary reference to the model saved
814
- self.my_model = current.model.save(
815
- path_to_my_model,
816
- label="my_model",
817
- metadata={
818
- "epochs": 10,
819
- "batch-size": 32,
820
- "learning-rate": 0.001,
821
- }
822
- )
823
- self.next(self.test)
824
-
825
- @model(load="my_model")
826
- @step
827
- def test(self):
828
- # `current.model.loaded` returns a dictionary of the loaded models
829
- # where the key is the name of the artifact and the value is the path to the model
830
- print(os.listdir(current.model.loaded["my_model"]))
831
- self.next(self.end)
832
- ```
767
+ Specifies the Conda environment for the step.
833
768
 
834
- - Loading models
835
- ```python
836
- @step
837
- def train(self):
838
- # current.model.load returns the path to the model loaded
839
- checkpoint_path = current.model.load(
840
- self.checkpoint_key,
841
- )
842
- model_path = current.model.load(
843
- self.model,
844
- )
845
- self.next(self.test)
846
- ```
769
+ Information in this decorator will augment any
770
+ attributes set in the `@conda_base` flow-level decorator. Hence,
771
+ you can use `@conda_base` to set packages required by all
772
+ steps and use `@conda` to specify step-specific overrides.
847
773
 
848
774
 
849
775
  Parameters
850
776
  ----------
851
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
852
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
853
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
854
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
855
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
856
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
857
-
858
- temp_dir_root : str, default: None
859
- The root directory under which `current.model.loaded` will store loaded models
777
+ packages : Dict[str, str], default {}
778
+ Packages to use for this step. The key is the name of the package
779
+ and the value is the version to use.
780
+ libraries : Dict[str, str], default {}
781
+ Supported for backward compatibility. When used with packages, packages will take precedence.
782
+ python : str, optional, default None
783
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
784
+ that the version used will correspond to the version of the Python interpreter used to start the run.
785
+ disabled : bool, default False
786
+ If set to True, disables @conda.
860
787
  """
861
788
  ...
862
789
 
863
790
  @typing.overload
864
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
865
792
  ...
866
793
 
867
794
  @typing.overload
868
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
795
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
869
796
  ...
870
797
 
871
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
798
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
872
799
  """
873
- Enables loading / saving of models within a step.
800
+ Specifies the Conda environment for the step.
874
801
 
875
- > Examples
876
- - Saving Models
877
- ```python
878
- @model
879
- @step
880
- def train(self):
881
- # current.model.save returns a dictionary reference to the model saved
882
- self.my_model = current.model.save(
883
- path_to_my_model,
884
- label="my_model",
885
- metadata={
886
- "epochs": 10,
887
- "batch-size": 32,
888
- "learning-rate": 0.001,
889
- }
890
- )
891
- self.next(self.test)
802
+ Information in this decorator will augment any
803
+ attributes set in the `@conda_base` flow-level decorator. Hence,
804
+ you can use `@conda_base` to set packages required by all
805
+ steps and use `@conda` to specify step-specific overrides.
892
806
 
893
- @model(load="my_model")
894
- @step
895
- def test(self):
896
- # `current.model.loaded` returns a dictionary of the loaded models
897
- # where the key is the name of the artifact and the value is the path to the model
898
- print(os.listdir(current.model.loaded["my_model"]))
899
- self.next(self.end)
807
+
808
+ Parameters
809
+ ----------
810
+ packages : Dict[str, str], default {}
811
+ Packages to use for this step. The key is the name of the package
812
+ and the value is the version to use.
813
+ libraries : Dict[str, str], default {}
814
+ Supported for backward compatibility. When used with packages, packages will take precedence.
815
+ python : str, optional, default None
816
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
817
+ that the version used will correspond to the version of the Python interpreter used to start the run.
818
+ disabled : bool, default False
819
+ If set to True, disables @conda.
820
+ """
821
+ ...
822
+
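As a usage illustration (not part of the generated stubs), here is a minimal sketch of a flow that pins step-specific packages with `@conda`; the package names and versions below are arbitrary examples.

```python
from metaflow import FlowSpec, step, conda


class CondaStepFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # Step-specific Conda environment; the pins below are illustrative only.
    @conda(packages={"pandas": "2.2.2"}, python="3.11.9")
    @step
    def train(self):
        import pandas as pd  # resolved from this step's Conda environment
        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.n_rows)


if __name__ == "__main__":
    CondaStepFlow()
```

Running the flow with `--environment=conda` (for example, `python conda_step_flow.py --environment=conda run`) lets Metaflow resolve the per-step environments.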
823
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
+ """
825
+ Specifies that this step should execute on DGX cloud.
826
+
827
+
828
+ Parameters
829
+ ----------
830
+ gpu : int
831
+ Number of GPUs to use.
832
+ gpu_type : str
833
+ Type of Nvidia GPU to use.
834
+ queue_timeout : int
835
+ Time to keep the job in NVCF's queue.
836
+ """
837
+ ...
838
+
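A hypothetical sketch of sending a step to DGX Cloud with `@nvidia`; the GPU type and queue timeout are placeholder values, and the decorator is assumed to be importable from `metaflow` as these stubs declare.

```python
from metaflow import FlowSpec, step, nvidia


class NvidiaStepFlow(FlowSpec):

    # gpu_type and queue_timeout are illustrative placeholders.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        import subprocess
        # List the GPUs visible to the task (requires NVIDIA drivers on the worker).
        print(subprocess.run(["nvidia-smi", "-L"], capture_output=True, text=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvidiaStepFlow()
```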
839
+ @typing.overload
840
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
841
+ """
842
+ Specifies the resources needed when executing this step.
843
+
844
+ Use `@resources` to specify the resource requirements
845
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
846
+
847
+ You can choose the compute layer on the command line by executing e.g.
900
848
  ```
849
+ python myflow.py run --with batch
850
+ ```
851
+ or
852
+ ```
853
+ python myflow.py run --with kubernetes
854
+ ```
855
+ which executes the flow on the desired system using the
856
+ requirements specified in `@resources`.
901
857
 
902
- - Loading models
858
+
859
+ Parameters
860
+ ----------
861
+ cpu : int, default 1
862
+ Number of CPUs required for this step.
863
+ gpu : int, optional, default None
864
+ Number of GPUs required for this step.
865
+ disk : int, optional, default None
866
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
867
+ memory : int, default 4096
868
+ Memory size (in MB) required for this step.
869
+ shared_memory : int, optional, default None
870
+ The value for the size (in MiB) of the /dev/shm volume for this step.
871
+ This parameter maps to the `--shm-size` option in Docker.
872
+ """
873
+ ...
874
+
875
+ @typing.overload
876
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
877
+ ...
878
+
879
+ @typing.overload
880
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
881
+ ...
882
+
883
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
884
+ """
885
+ Specifies the resources needed when executing this step.
886
+
887
+ Use `@resources` to specify the resource requirements
888
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
889
+
890
+ You can choose the compute layer on the command line by executing e.g.
891
+ ```
892
+ python myflow.py run --with batch
893
+ ```
894
+ or
895
+ ```
896
+ python myflow.py run --with kubernetes
897
+ ```
898
+ which executes the flow on the desired system using the
899
+ requirements specified in `@resources`.
900
+
901
+
902
+ Parameters
903
+ ----------
904
+ cpu : int, default 1
905
+ Number of CPUs required for this step.
906
+ gpu : int, optional, default None
907
+ Number of GPUs required for this step.
908
+ disk : int, optional, default None
909
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
910
+ memory : int, default 4096
911
+ Memory size (in MB) required for this step.
912
+ shared_memory : int, optional, default None
913
+ The value for the size (in MiB) of the /dev/shm volume for this step.
914
+ This parameter maps to the `--shm-size` option in Docker.
915
+ """
916
+ ...
917
+
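To make the docstring concrete, a small sketch of declaring requirements with `@resources` and choosing the compute layer at run time; the numbers are arbitrary.

```python
from metaflow import FlowSpec, step, resources


class ResourcesFlow(FlowSpec):

    # Requirements only; the scheduler (local, @batch, @kubernetes) is chosen at run time.
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        import multiprocessing
        print("CPUs visible to this task:", multiprocessing.cpu_count())
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesFlow()
```

`python resources_flow.py run --with kubernetes` (or `--with batch`) then applies these requirements on the selected compute layer, as described above.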
918
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
919
+ """
920
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
921
+
922
+ User code call
923
+ --------------
924
+ @vllm(
925
+ model="...",
926
+ ...
927
+ )
928
+
929
+ Valid backend options
930
+ ---------------------
931
+ - 'local': Run as a separate process on the local task machine.
932
+
933
+ Valid model options
934
+ -------------------
935
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
936
+
937
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
938
+ If you need multiple models, you must create multiple @vllm decorators.
939
+
940
+
941
+ Parameters
942
+ ----------
943
+ model: str
944
+ HuggingFace model identifier to be served by vLLM.
945
+ backend: str
946
+ Determines where and how to run the vLLM process.
947
+ openai_api_server: bool
948
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
949
+ Default is False (uses native engine).
950
+ Set to True for backward compatibility with existing code.
951
+ debug: bool
952
+ Whether to turn on verbose debugging logs.
953
+ card_refresh_interval: int
954
+ Interval in seconds for refreshing the vLLM status card.
955
+ Only used when openai_api_server=True.
956
+ max_retries: int
957
+ Maximum number of retries checking for vLLM server startup.
958
+ Only used when openai_api_server=True.
959
+ retry_alert_frequency: int
960
+ Frequency of alert logs for vLLM server startup retries.
961
+ Only used when openai_api_server=True.
962
+ engine_args : dict
963
+ Additional keyword arguments to pass to the vLLM engine.
964
+ For example, `tensor_parallel_size=2`.
965
+ """
966
+ ...
967
+
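A hypothetical sketch of attaching a vLLM sidecar to a step; the model id and engine arguments are placeholders, the remaining parameters are assumed to take their runtime defaults, and the decorator is assumed to be importable from `metaflow` as these stubs declare.

```python
from metaflow import FlowSpec, step, vllm


class VllmSidecarFlow(FlowSpec):

    # One @vllm decorator serves exactly one model; values below are illustrative.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local", engine_args={"tensor_parallel_size": 1})
    @step
    def start(self):
        # The vLLM process runs alongside this task for its duration; how the step
        # talks to it (native engine vs. OpenAI-compatible server) depends on the
        # `openai_api_server` setting documented above.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VllmSidecarFlow()
```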
968
+ @typing.overload
969
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
970
+ """
971
+ Specifies environment variables to be set prior to the execution of a step.
972
+
973
+
974
+ Parameters
975
+ ----------
976
+ vars : Dict[str, str], default {}
977
+ Dictionary of environment variables to set.
978
+ """
979
+ ...
980
+
981
+ @typing.overload
982
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
983
+ ...
984
+
985
+ @typing.overload
986
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
987
+ ...
988
+
989
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
990
+ """
991
+ Specifies environment variables to be set prior to the execution of a step.
992
+
993
+
994
+ Parameters
995
+ ----------
996
+ vars : Dict[str, str], default {}
997
+ Dictionary of environment variables to set.
998
+ """
999
+ ...
1000
+
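A minimal sketch of setting an environment variable before a step runs; the variable name and value are arbitrary examples.

```python
import os

from metaflow import FlowSpec, step, environment


class EnvVarFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})  # illustrative variable
    @step
    def start(self):
        # The variable is injected before the step body executes.
        print("TOKENIZERS_PARALLELISM =", os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvVarFlow()
```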
1001
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1002
+ """
1003
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1004
+
1005
+ > Examples
1006
+
1007
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
903
1008
  ```python
904
- @step
905
- def train(self):
906
- # current.model.load returns the path to the model loaded
907
- checkpoint_path = current.model.load(
908
- self.checkpoint_key,
909
- )
910
- model_path = current.model.load(
911
- self.model,
912
- )
913
- self.next(self.test)
1009
+ @huggingface_hub
1010
+ @step
1011
+ def pull_model_from_huggingface(self):
1012
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1013
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1014
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1015
+ # value of the function is a reference to the model in the backend storage.
1016
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1017
+
1018
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1019
+ self.llama_model = current.huggingface_hub.snapshot_download(
1020
+ repo_id=self.model_id,
1021
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
1022
+ )
1023
+ self.next(self.train)
1024
+ ```
1025
+
1026
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
1027
+ ```python
1028
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
1029
+ @step
1030
+ def pull_model_from_huggingface(self):
1031
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1032
+ ```
1033
+
1034
+ ```python
1035
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
1036
+ @step
1037
+ def finetune_model(self):
1038
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1039
+ # path_to_model will be /my-directory
1040
+ ```
1041
+
1042
+ ```python
1043
+ # Takes all the arguments passed to `snapshot_download`
1044
+ # except for `local_dir`
1045
+ @huggingface_hub(load=[
1046
+ {
1047
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
1048
+ },
1049
+ {
1050
+ "repo_id": "myorg/mistral-lora",
1051
+ "repo_type": "model",
1052
+ },
1053
+ ])
1054
+ @step
1055
+ def finetune_model(self):
1056
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1057
+ # path_to_model points to the locally downloaded snapshot
914
1058
  ```
915
1059
 
916
1060
 
917
1061
  Parameters
918
1062
  ----------
919
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
920
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
921
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
922
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
923
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
924
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1063
+ temp_dir_root : str, optional
1064
+ The root directory that will hold the temporary directory where objects will be downloaded.
925
1065
 
926
- temp_dir_root : str, default: None
927
- The root directory under which `current.model.loaded` will store loaded models
1066
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1067
+ The list of repos (models/datasets) to load.
1068
+
1069
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1070
+
1071
+ - If repo (model/dataset) is not found in the datastore:
1072
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1073
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1074
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1075
+
1076
+ - If repo is found in the datastore:
1077
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1078
+ """
1079
+ ...
1080
+
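Tying the docstring examples together, a sketch that caches a Hugging Face snapshot in one step and loads it in the next via `@model`; the repo id is illustrative and both decorators are assumed to be importable from `metaflow` as these stubs declare.

```python
import os

from metaflow import FlowSpec, step, huggingface_hub, model, current


class HFReferenceFlow(FlowSpec):

    @huggingface_hub
    @step
    def start(self):
        # Returns a reference to the snapshot cached in Metaflow's datastore.
        self.llm = current.huggingface_hub.snapshot_download(
            repo_id="mistralai/Mistral-7B-Instruct-v0.1",  # illustrative repo id
            allow_patterns=["*.json", "tokenizer.*"],
        )
        self.next(self.end)

    @model(load=["llm"])
    @step
    def end(self):
        # current.model.loaded maps artifact names to local paths.
        print(os.listdir(current.model.loaded["llm"]))


if __name__ == "__main__":
    HFReferenceFlow()
```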
1081
+ @typing.overload
1082
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1083
+ """
1084
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1085
+
1086
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1087
+
1088
+
1089
+ Parameters
1090
+ ----------
1091
+ type : str, default 'default'
1092
+ Card type.
1093
+ id : str, optional, default None
1094
+ If multiple cards are present, use this id to identify this card.
1095
+ options : Dict[str, Any], default {}
1096
+ Options passed to the card. The contents depend on the card type.
1097
+ timeout : int, default 45
1098
+ Interrupt reporting if it takes more than this many seconds.
1099
+ """
1100
+ ...
1101
+
1102
+ @typing.overload
1103
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1104
+ ...
1105
+
1106
+ @typing.overload
1107
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1108
+ ...
1109
+
1110
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1111
+ """
1112
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1113
+
1114
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1115
+
1116
+
1117
+ Parameters
1118
+ ----------
1119
+ type : str, default 'default'
1120
+ Card type.
1121
+ id : str, optional, default None
1122
+ If multiple cards are present, use this id to identify this card.
1123
+ options : Dict[str, Any], default {}
1124
+ Options passed to the card. The contents depend on the card type.
1125
+ timeout : int, default 45
1126
+ Interrupt reporting if it takes more than this many seconds.
928
1127
  """
929
1128
  ...
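A short sketch of appending content to an identified card from inside a step; the id and markup are arbitrary examples.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown


class CardFlow(FlowSpec):

    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        self.score = 0.93
        # Components appended here are rendered into this task's "summary" card.
        current.card["summary"].append(Markdown(f"## Validation score: {self.score}"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardFlow()
```

After a run, `python card_flow.py card view start` renders the card produced by the latest task of `start`.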
930
1129
 
@@ -979,17 +1178,42 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
979
1178
  """
980
1179
  ...
981
1180
 
982
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1181
+ @typing.overload
1182
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
983
1183
  """
984
- Specifies that this step should execute on DGX cloud.
1184
+ Specifies secrets to be retrieved and injected as environment variables prior to
1185
+ the execution of a step.
985
1186
 
986
1187
 
987
1188
  Parameters
988
1189
  ----------
989
- gpu : int
990
- Number of GPUs to use.
991
- gpu_type : str
992
- Type of Nvidia GPU to use.
1190
+ sources : List[Union[str, Dict[str, Any]]], default: []
1191
+ List of secret specs, defining how the secrets are to be retrieved
1192
+ role : str, optional, default: None
1193
+ Role to use for fetching secrets
1194
+ """
1195
+ ...
1196
+
1197
+ @typing.overload
1198
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1199
+ ...
1200
+
1201
+ @typing.overload
1202
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1203
+ ...
1204
+
1205
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1206
+ """
1207
+ Specifies secrets to be retrieved and injected as environment variables prior to
1208
+ the execution of a step.
1209
+
1210
+
1211
+ Parameters
1212
+ ----------
1213
+ sources : List[Union[str, Dict[str, Any]]], default: []
1214
+ List of secret specs, defining how the secrets are to be retrieved
1215
+ role : str, optional, default: None
1216
+ Role to use for fetching secrets
993
1217
  """
994
1218
  ...
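A small sketch of injecting a secret as environment variables; the secret name and the key it exposes are placeholders for whatever is configured in your secrets backend.

```python
import os

from metaflow import FlowSpec, step, secrets


class SecretsFlow(FlowSpec):

    # "db-credentials" is a placeholder secret spec.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys of the secret are exposed as environment variables before the step runs.
        print("DB_USER present:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsFlow()
```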
995
1219
 
@@ -1037,269 +1261,154 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
1037
1261
  ...
1038
1262
 
1039
1263
  @typing.overload
1040
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1041
- """
1042
- Specifies the number of times the task corresponding
1043
- to a step needs to be retried.
1044
-
1045
- This decorator is useful for handling transient errors, such as networking issues.
1046
- If your task contains operations that can't be retried safely, e.g. database updates,
1047
- it is advisable to annotate it with `@retry(times=0)`.
1048
-
1049
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1050
- decorator will execute a no-op task after all retries have been exhausted,
1051
- ensuring that the flow execution can continue.
1052
-
1053
-
1054
- Parameters
1055
- ----------
1056
- times : int, default 3
1057
- Number of times to retry this task.
1058
- minutes_between_retries : int, default 2
1059
- Number of minutes between retries.
1060
- """
1061
- ...
1062
-
1063
- @typing.overload
1064
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1065
- ...
1066
-
1067
- @typing.overload
1068
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1069
- ...
1070
-
1071
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1072
- """
1073
- Specifies the number of times the task corresponding
1074
- to a step needs to be retried.
1075
-
1076
- This decorator is useful for handling transient errors, such as networking issues.
1077
- If your task contains operations that can't be retried safely, e.g. database updates,
1078
- it is advisable to annotate it with `@retry(times=0)`.
1079
-
1080
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1081
- decorator will execute a no-op task after all retries have been exhausted,
1082
- ensuring that the flow execution can continue.
1083
-
1084
-
1085
- Parameters
1086
- ----------
1087
- times : int, default 3
1088
- Number of times to retry this task.
1089
- minutes_between_retries : int, default 2
1090
- Number of minutes between retries.
1091
- """
1092
- ...
1093
-
1094
- @typing.overload
1095
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1096
- """
1097
- Decorator prototype for all step decorators. This function gets specialized
1098
- and imported for all decorators types by _import_plugin_decorators().
1099
- """
1100
- ...
1101
-
1102
- @typing.overload
1103
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1104
- ...
1105
-
1106
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1107
- """
1108
- Decorator prototype for all step decorators. This function gets specialized
1109
- and imported for all decorators types by _import_plugin_decorators().
1110
- """
1111
- ...
1112
-
1113
- @typing.overload
1114
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1264
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1115
1265
  """
1116
- Specifies the PyPI packages for the step.
1266
+ Specifies the flow(s) that this flow depends on.
1117
1267
 
1118
- Information in this decorator will augment any
1119
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1120
- you can use `@pypi_base` to set packages required by all
1121
- steps and use `@pypi` to specify step-specific overrides.
1268
+ ```
1269
+ @trigger_on_finish(flow='FooFlow')
1270
+ ```
1271
+ or
1272
+ ```
1273
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1274
+ ```
1275
+ This decorator respects the @project decorator and triggers the flow
1276
+ when upstream runs within the same namespace complete successfully.
1122
1277
 
1278
+ Additionally, you can declare project-aware upstream flow dependencies
1279
+ by specifying the fully qualified project_flow_name.
1280
+ ```
1281
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1282
+ ```
1283
+ or
1284
+ ```
1285
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1286
+ ```
1123
1287
 
1124
- Parameters
1125
- ----------
1126
- packages : Dict[str, str], default: {}
1127
- Packages to use for this step. The key is the name of the package
1128
- and the value is the version to use.
1129
- python : str, optional, default: None
1130
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1131
- that the version used will correspond to the version of the Python interpreter used to start the run.
1132
- """
1133
- ...
1134
-
1135
- @typing.overload
1136
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1137
- ...
1138
-
1139
- @typing.overload
1140
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1141
- ...
1142
-
1143
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1144
- """
1145
- Specifies the PyPI packages for the step.
1288
+ You can also specify just the project or project branch (other values will be
1289
+ inferred from the current project or project branch):
1290
+ ```
1291
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1292
+ ```
1146
1293
 
1147
- Information in this decorator will augment any
1148
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1149
- you can use `@pypi_base` to set packages required by all
1150
- steps and use `@pypi` to specify step-specific overrides.
1294
+ Note that `branch` is typically one of:
1295
+ - `prod`
1296
+ - `user.bob`
1297
+ - `test.my_experiment`
1298
+ - `prod.staging`
1151
1299
 
1152
1300
 
1153
1301
  Parameters
1154
1302
  ----------
1155
- packages : Dict[str, str], default: {}
1156
- Packages to use for this step. The key is the name of the package
1157
- and the value is the version to use.
1158
- python : str, optional, default: None
1159
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1160
- that the version used will correspond to the version of the Python interpreter used to start the run.
1303
+ flow : Union[str, Dict[str, str]], optional, default None
1304
+ Upstream flow dependency for this flow.
1305
+ flows : List[Union[str, Dict[str, str]]], default []
1306
+ Upstream flow dependencies for this flow.
1307
+ options : Dict[str, Any], default {}
1308
+ Backend-specific configuration for tuning eventing behavior.
1161
1309
  """
1162
1310
  ...
1163
1311
 
1164
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1312
+ @typing.overload
1313
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1314
+ ...
1315
+
1316
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1165
1317
  """
1166
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1318
+ Specifies the flow(s) that this flow depends on.
1167
1319
 
1168
- User code call
1169
- --------------
1170
- @vllm(
1171
- model="...",
1172
- ...
1173
- )
1320
+ ```
1321
+ @trigger_on_finish(flow='FooFlow')
1322
+ ```
1323
+ or
1324
+ ```
1325
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1326
+ ```
1327
+ This decorator respects the @project decorator and triggers the flow
1328
+ when upstream runs within the same namespace complete successfully.
1174
1329
 
1175
- Valid backend options
1176
- ---------------------
1177
- - 'local': Run as a separate process on the local task machine.
1330
+ Additionally, you can declare project-aware upstream flow dependencies
1331
+ by specifying the fully qualified project_flow_name.
1332
+ ```
1333
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1334
+ ```
1335
+ or
1336
+ ```
1337
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1338
+ ```
1178
1339
 
1179
- Valid model options
1180
- -------------------
1181
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1340
+ You can also specify just the project or project branch (other values will be
1341
+ inferred from the current project or project branch):
1342
+ ```
1343
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1344
+ ```
1182
1345
 
1183
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1184
- If you need multiple models, you must create multiple @vllm decorators.
1346
+ Note that `branch` is typically one of:
1347
+ - `prod`
1348
+ - `user.bob`
1349
+ - `test.my_experiment`
1350
+ - `prod.staging`
1185
1351
 
1186
1352
 
1187
1353
  Parameters
1188
1354
  ----------
1189
- model: str
1190
- HuggingFace model identifier to be served by vLLM.
1191
- backend: str
1192
- Determines where and how to run the vLLM process.
1193
- openai_api_server: bool
1194
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1195
- Default is False (uses native engine).
1196
- Set to True for backward compatibility with existing code.
1197
- debug: bool
1198
- Whether to turn on verbose debugging logs.
1199
- card_refresh_interval: int
1200
- Interval in seconds for refreshing the vLLM status card.
1201
- Only used when openai_api_server=True.
1202
- max_retries: int
1203
- Maximum number of retries checking for vLLM server startup.
1204
- Only used when openai_api_server=True.
1205
- retry_alert_frequency: int
1206
- Frequency of alert logs for vLLM server startup retries.
1207
- Only used when openai_api_server=True.
1208
- engine_args : dict
1209
- Additional keyword arguments to pass to the vLLM engine.
1210
- For example, `tensor_parallel_size=2`.
1355
+ flow : Union[str, Dict[str, str]], optional, default None
1356
+ Upstream flow dependency for this flow.
1357
+ flows : List[Union[str, Dict[str, str]]], default []
1358
+ Upstream flow dependencies for this flow.
1359
+ options : Dict[str, Any], default {}
1360
+ Backend-specific configuration for tuning eventing behavior.
1211
1361
  """
1212
1362
  ...
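A sketch of a downstream flow triggered by an upstream flow's completion; `FooFlow` is a placeholder name, and the trigger only takes effect once the flow is deployed to a production orchestrator.

```python
from metaflow import FlowSpec, step, trigger_on_finish, current


@trigger_on_finish(flow="FooFlow")  # placeholder upstream flow
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # When an event triggered this deployment, details are available on current.trigger.
        if current.trigger:
            print("triggered by:", current.trigger.run.pathspec)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```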
1213
1363
 
1214
1364
  @typing.overload
1215
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1365
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1216
1366
  """
1217
- Creates a human-readable report, a Metaflow Card, after this step completes.
1218
-
1219
- Note that you may add multiple `@card` decorators in a step with different parameters.
1367
+ Specifies the times when the flow should be run when running on a
1368
+ production scheduler.
1220
1369
 
1221
1370
 
1222
1371
  Parameters
1223
1372
  ----------
1224
- type : str, default 'default'
1225
- Card type.
1226
- id : str, optional, default None
1227
- If multiple cards are present, use this id to identify this card.
1228
- options : Dict[str, Any], default {}
1229
- Options passed to the card. The contents depend on the card type.
1230
- timeout : int, default 45
1231
- Interrupt reporting if it takes more than this many seconds.
1373
+ hourly : bool, default False
1374
+ Run the workflow hourly.
1375
+ daily : bool, default True
1376
+ Run the workflow daily.
1377
+ weekly : bool, default False
1378
+ Run the workflow weekly.
1379
+ cron : str, optional, default None
1380
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1381
+ specified by this expression.
1382
+ timezone : str, optional, default None
1383
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1384
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1232
1385
  """
1233
1386
  ...
1234
1387
 
1235
1388
  @typing.overload
1236
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1237
- ...
1238
-
1239
- @typing.overload
1240
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1241
- ...
1242
-
1243
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1244
- """
1245
- Creates a human-readable report, a Metaflow Card, after this step completes.
1246
-
1247
- Note that you may add multiple `@card` decorators in a step with different parameters.
1248
-
1249
-
1250
- Parameters
1251
- ----------
1252
- type : str, default 'default'
1253
- Card type.
1254
- id : str, optional, default None
1255
- If multiple cards are present, use this id to identify this card.
1256
- options : Dict[str, Any], default {}
1257
- Options passed to the card. The contents depend on the card type.
1258
- timeout : int, default 45
1259
- Interrupt reporting if it takes more than this many seconds.
1260
- """
1389
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1261
1390
  ...
1262
1391
 
1263
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1392
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1264
1393
  """
1265
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1266
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1267
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1268
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1269
- starts only after all sensors finish.
1394
+ Specifies the times when the flow should be run when running on a
1395
+ production scheduler.
1270
1396
 
1271
1397
 
1272
1398
  Parameters
1273
1399
  ----------
1274
- timeout : int
1275
- Time, in seconds before the task times out and fails. (Default: 3600)
1276
- poke_interval : int
1277
- Time in seconds that the job should wait in between each try. (Default: 60)
1278
- mode : str
1279
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1280
- exponential_backoff : bool
1281
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1282
- pool : str
1283
- the slot pool this task should run in,
1284
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1285
- soft_fail : bool
1286
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1287
- name : str
1288
- Name of the sensor on Airflow
1289
- description : str
1290
- Description of sensor in the Airflow UI
1291
- bucket_key : Union[str, List[str]]
1292
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1293
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1294
- bucket_name : str
1295
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1296
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1297
- wildcard_match : bool
1298
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1299
- aws_conn_id : str
1300
- a reference to the s3 connection on Airflow. (Default: None)
1301
- verify : bool
1302
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1400
+ hourly : bool, default False
1401
+ Run the workflow hourly.
1402
+ daily : bool, default True
1403
+ Run the workflow daily.
1404
+ weekly : bool, default False
1405
+ Run the workflow weekly.
1406
+ cron : str, optional, default None
1407
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1408
+ specified by this expression.
1409
+ timezone : str, optional, default None
1410
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1411
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1303
1412
  """
1304
1413
  ...
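A minimal sketch of a daily-scheduled flow; the schedule only takes effect when the flow is deployed to a production scheduler (for example via `argo-workflows create`).

```python
from metaflow import FlowSpec, step, schedule


@schedule(daily=True)
class NightlyRefreshFlow(FlowSpec):

    @step
    def start(self):
        print("refreshing nightly data")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyRefreshFlow()
```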
1305
1414
 
@@ -1417,54 +1526,38 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1417
1526
  """
1418
1527
  ...
1419
1528
 
1420
- @typing.overload
1421
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1529
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1422
1530
  """
1423
- Specifies the times when the flow should be run when running on a
1424
- production scheduler.
1425
-
1531
+ Specifies what flows belong to the same project.
1426
1532
 
1427
- Parameters
1428
- ----------
1429
- hourly : bool, default False
1430
- Run the workflow hourly.
1431
- daily : bool, default True
1432
- Run the workflow daily.
1433
- weekly : bool, default False
1434
- Run the workflow weekly.
1435
- cron : str, optional, default None
1436
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1437
- specified by this expression.
1438
- timezone : str, optional, default None
1439
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1440
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1441
- """
1442
- ...
1443
-
1444
- @typing.overload
1445
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1446
- ...
1447
-
1448
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1449
- """
1450
- Specifies the times when the flow should be run when running on a
1451
- production scheduler.
1533
+ A project-specific namespace is created for all flows that
1534
+ use the same `@project(name)`.
1452
1535
 
1453
1536
 
1454
1537
  Parameters
1455
1538
  ----------
1456
- hourly : bool, default False
1457
- Run the workflow hourly.
1458
- daily : bool, default True
1459
- Run the workflow daily.
1460
- weekly : bool, default False
1461
- Run the workflow weekly.
1462
- cron : str, optional, default None
1463
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1464
- specified by this expression.
1465
- timezone : str, optional, default None
1466
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1467
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1539
+ name : str
1540
+ Project name. Make sure that the name is unique amongst all
1541
+ projects that use the same production scheduler. The name may
1542
+ contain only lowercase alphanumeric characters and underscores.
1543
+
1544
+ branch : Optional[str], default None
1545
+ The branch to use. If not specified, the branch is set to
1546
+ `user.<username>` unless `production` is set to `True`. This can
1547
+ also be set on the command line using `--branch` as a top-level option.
1548
+ It is an error to specify `branch` in the decorator and on the command line.
1549
+
1550
+ production : bool, default False
1551
+ Whether or not the branch is the production branch. This can also be set on the
1552
+ command line using `--production` as a top-level option. It is an error to specify
1553
+ `production` in the decorator and on the command line.
1554
+ The project branch name will be:
1555
+ - if `branch` is specified:
1556
+ - if `production` is True: `prod.<branch>`
1557
+ - if `production` is False: `test.<branch>`
1558
+ - if `branch` is not specified:
1559
+ - if `production` is True: `prod`
1560
+ - if `production` is False: `user.<username>`
1468
1561
  """
1469
1562
  ...
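A short sketch of grouping flows under one project namespace; the project name is a placeholder.

```python
from metaflow import FlowSpec, step, project


@project(name="fraud_detection")  # placeholder project name
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        # Deployments of this flow are isolated per branch: user.<username>,
        # test.<branch>, or prod, following the rules described above.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```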
1470
1563
 
@@ -1512,138 +1605,53 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1512
1605
  ...
1513
1606
 
1514
1607
  @typing.overload
1515
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1608
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1516
1609
  """
1517
- Specifies the flow(s) that this flow depends on.
1518
-
1519
- ```
1520
- @trigger_on_finish(flow='FooFlow')
1521
- ```
1522
- or
1523
- ```
1524
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1525
- ```
1526
- This decorator respects the @project decorator and triggers the flow
1527
- when upstream runs within the same namespace complete successfully
1528
-
1529
- Additionally, you can specify project aware upstream flow dependencies
1530
- by specifying the fully qualified project_flow_name.
1531
- ```
1532
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1533
- ```
1534
- or
1535
- ```
1536
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1537
- ```
1538
-
1539
- You can also specify just the project or project branch (other values will be
1540
- inferred from the current project or project branch):
1541
- ```
1542
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1543
- ```
1610
+ Specifies the Conda environment for all steps of the flow.
1544
1611
 
1545
- Note that `branch` is typically one of:
1546
- - `prod`
1547
- - `user.bob`
1548
- - `test.my_experiment`
1549
- - `prod.staging`
1612
+ Use `@conda_base` to set common libraries required by all
1613
+ steps and use `@conda` to specify step-specific additions.
1550
1614
 
1551
1615
 
1552
1616
  Parameters
1553
1617
  ----------
1554
- flow : Union[str, Dict[str, str]], optional, default None
1555
- Upstream flow dependency for this flow.
1556
- flows : List[Union[str, Dict[str, str]]], default []
1557
- Upstream flow dependencies for this flow.
1558
- options : Dict[str, Any], default {}
1559
- Backend-specific configuration for tuning eventing behavior.
1618
+ packages : Dict[str, str], default {}
1619
+ Packages to use for this flow. The key is the name of the package
1620
+ and the value is the version to use.
1621
+ libraries : Dict[str, str], default {}
1622
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1623
+ python : str, optional, default None
1624
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1625
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1626
+ disabled : bool, default False
1627
+ If set to True, disables Conda.
1560
1628
  """
1561
1629
  ...
1562
1630
 
1563
1631
  @typing.overload
1564
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1565
- ...
1566
-
1567
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1568
- """
1569
- Specifies the flow(s) that this flow depends on.
1570
-
1571
- ```
1572
- @trigger_on_finish(flow='FooFlow')
1573
- ```
1574
- or
1575
- ```
1576
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1577
- ```
1578
- This decorator respects the @project decorator and triggers the flow
1579
- when upstream runs within the same namespace complete successfully
1580
-
1581
- Additionally, you can specify project aware upstream flow dependencies
1582
- by specifying the fully qualified project_flow_name.
1583
- ```
1584
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1585
- ```
1586
- or
1587
- ```
1588
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1589
- ```
1590
-
1591
- You can also specify just the project or project branch (other values will be
1592
- inferred from the current project or project branch):
1593
- ```
1594
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1595
- ```
1596
-
1597
- Note that `branch` is typically one of:
1598
- - `prod`
1599
- - `user.bob`
1600
- - `test.my_experiment`
1601
- - `prod.staging`
1602
-
1603
-
1604
- Parameters
1605
- ----------
1606
- flow : Union[str, Dict[str, str]], optional, default None
1607
- Upstream flow dependency for this flow.
1608
- flows : List[Union[str, Dict[str, str]]], default []
1609
- Upstream flow dependencies for this flow.
1610
- options : Dict[str, Any], default {}
1611
- Backend-specific configuration for tuning eventing behavior.
1612
- """
1632
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1613
1633
  ...
1614
1634
 
1615
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1635
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1616
1636
  """
1617
- Specifies what flows belong to the same project.
1637
+ Specifies the Conda environment for all steps of the flow.
1618
1638
 
1619
- A project-specific namespace is created for all flows that
1620
- use the same `@project(name)`.
1639
+ Use `@conda_base` to set common libraries required by all
1640
+ steps and use `@conda` to specify step-specific additions.
1621
1641
 
1622
1642
 
1623
1643
  Parameters
1624
1644
  ----------
1625
- name : str
1626
- Project name. Make sure that the name is unique amongst all
1627
- projects that use the same production scheduler. The name may
1628
- contain only lowercase alphanumeric characters and underscores.
1629
-
1630
- branch : Optional[str], default None
1631
- The branch to use. If not specified, the branch is set to
1632
- `user.<username>` unless `production` is set to `True`. This can
1633
- also be set on the command line using `--branch` as a top-level option.
1634
- It is an error to specify `branch` in the decorator and on the command line.
1635
-
1636
- production : bool, default False
1637
- Whether or not the branch is the production branch. This can also be set on the
1638
- command line using `--production` as a top-level option. It is an error to specify
1639
- `production` in the decorator and on the command line.
1640
- The project branch name will be:
1641
- - if `branch` is specified:
1642
- - if `production` is True: `prod.<branch>`
1643
- - if `production` is False: `test.<branch>`
1644
- - if `branch` is not specified:
1645
- - if `production` is True: `prod`
1646
- - if `production` is False: `user.<username>`
1645
+ packages : Dict[str, str], default {}
1646
+ Packages to use for this flow. The key is the name of the package
1647
+ and the value is the version to use.
1648
+ libraries : Dict[str, str], default {}
1649
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1650
+ python : str, optional, default None
1651
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1652
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1653
+ disabled : bool, default False
1654
+ If set to True, disables Conda.
1647
1655
  """
1648
1656
  ...
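A minimal sketch pairing `@conda_base` with the step-level decorator documented earlier; the pins are illustrative.

```python
from metaflow import FlowSpec, step, conda_base


# Flow-level Conda environment shared by every step; versions are examples only.
@conda_base(packages={"numpy": "1.26.4"}, python="3.11.9")
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # available in all steps via @conda_base
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print("total:", self.total)


if __name__ == "__main__":
    CondaBaseFlow()
```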
1649
1657
 
@@ -1740,30 +1748,51 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1740
1748
  """
1741
1749
  ...
1742
1750
 
1743
- @typing.overload
1744
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1751
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1745
1752
  """
1746
- Specifies the PyPI packages for all steps of the flow.
1753
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1754
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1755
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1756
+ added as flow decorators. Adding more than one decorator ensures that the `start` step
1757
+ starts only after all sensors finish.
1747
1758
 
1748
- Use `@pypi_base` to set common packages required by all
1749
- steps and use `@pypi` to specify step-specific overrides.
1750
1759
 
1751
1760
  Parameters
1752
1761
  ----------
1753
- packages : Dict[str, str], default: {}
1754
- Packages to use for this flow. The key is the name of the package
1755
- and the value is the version to use.
1756
- python : str, optional, default: None
1757
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1758
- that the version used will correspond to the version of the Python interpreter used to start the run.
1762
+ timeout : int
1763
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1764
+ poke_interval : int
1765
+ Time in seconds that the job should wait in between each try. (Default: 60)
1766
+ mode : str
1767
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1768
+ exponential_backoff : bool
1769
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1770
+ pool : str
1771
+ The slot pool this task should run in;
1772
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1773
+ soft_fail : bool
1774
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1775
+ name : str
1776
+ Name of the sensor on Airflow
1777
+ description : str
1778
+ Description of sensor in the Airflow UI
1779
+ bucket_key : Union[str, List[str]]
1780
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1781
+ When it is specified as a full s3:// URL, leave `bucket_name` as None.
1782
+ bucket_name : str
1783
+ Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
1784
+ When specified, all the keys passed to `bucket_key` refer to this bucket. (Default: None)
1785
+ wildcard_match : bool
1786
+ Whether the `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
1787
+ aws_conn_id : str
1788
+ A reference to the S3 connection on Airflow. (Default: None)
1789
+ verify : bool
1790
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1759
1791
  """
1760
1792
  ...
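A hypothetical sketch of gating a flow on an S3 key when it is compiled for Airflow; the bucket, key, and flow name are placeholders, and the remaining sensor parameters are assumed to take their documented defaults.

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor


# The sensor only takes effect when the flow is compiled with `airflow create`.
@airflow_s3_key_sensor(
    bucket_key="s3://example-bucket/exports/daily/_SUCCESS",  # placeholder key
    timeout=3600,
    poke_interval=60,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        print("upstream export detected; starting processing")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()
```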
1761
1793
 
1762
1794
  @typing.overload
1763
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1764
- ...
1765
-
1766
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1795
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1767
1796
  """
1768
1797
  Specifies the PyPI packages for all steps of the flow.
1769
1798
 
@@ -1782,53 +1811,24 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1782
1811
  ...
1783
1812
 
1784
1813
  @typing.overload
1785
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1786
- """
1787
- Specifies the Conda environment for all steps of the flow.
1788
-
1789
- Use `@conda_base` to set common libraries required by all
1790
- steps and use `@conda` to specify step-specific additions.
1791
-
1792
-
1793
- Parameters
1794
- ----------
1795
- packages : Dict[str, str], default {}
1796
- Packages to use for this flow. The key is the name of the package
1797
- and the value is the version to use.
1798
- libraries : Dict[str, str], default {}
1799
- Supported for backward compatibility. When used with packages, packages will take precedence.
1800
- python : str, optional, default None
1801
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1802
- that the version used will correspond to the version of the Python interpreter used to start the run.
1803
- disabled : bool, default False
1804
- If set to True, disables Conda.
1805
- """
1806
- ...
1807
-
1808
- @typing.overload
1809
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1814
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1810
1815
  ...
1811
1816
 
1812
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1817
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1813
1818
  """
1814
- Specifies the Conda environment for all steps of the flow.
1815
-
1816
- Use `@conda_base` to set common libraries required by all
1817
- steps and use `@conda` to specify step-specific additions.
1819
+ Specifies the PyPI packages for all steps of the flow.
1818
1820
 
1821
+ Use `@pypi_base` to set common packages required by all
1822
+ steps and use `@pypi` to specify step-specific overrides.
1819
1823
 
1820
1824
  Parameters
1821
1825
  ----------
1822
- packages : Dict[str, str], default {}
1826
+ packages : Dict[str, str], default: {}
1823
1827
  Packages to use for this flow. The key is the name of the package
1824
1828
  and the value is the version to use.
1825
- libraries : Dict[str, str], default {}
1826
- Supported for backward compatibility. When used with packages, packages will take precedence.
1827
- python : str, optional, default None
1829
+ python : str, optional, default: None
1828
1830
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1829
1831
  that the version used will correspond to the version of the Python interpreter used to start the run.
1830
- disabled : bool, default False
1831
- If set to True, disables Conda.
1832
1832
  """
1833
1833
  ...
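Finally, a minimal sketch of `@pypi_base`; the package pins are arbitrary examples, and the flow is expected to be run with `--environment=pypi` so the environment is resolved.

```python
from metaflow import FlowSpec, step, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11.9")  # illustrative pins
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level PyPI environment
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseFlow()
```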
1834
1834