ob-metaflow-stubs 6.0.4.1rc0__py2.py3-none-any.whl → 6.0.4.1rc1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (260) hide show
  1. metaflow-stubs/__init__.pyi +738 -738
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +29 -29
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +2 -2
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/multicore_utils.pyi +1 -1
  116. metaflow-stubs/ob_internal.pyi +1 -1
  117. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  118. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  119. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  120. metaflow-stubs/packaging_sys/tar_backend.pyi +2 -2
  121. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  122. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  123. metaflow-stubs/parameters.pyi +3 -3
  124. metaflow-stubs/plugins/__init__.pyi +14 -14
  125. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  126. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  133. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  136. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  139. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  140. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  141. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  143. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  147. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  149. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  156. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  157. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  161. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  162. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  163. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  164. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  171. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  172. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  173. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  176. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  177. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  178. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  179. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  180. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  183. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  184. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  185. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  186. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  187. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  188. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  191. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  193. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  196. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  199. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  202. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  206. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  207. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  208. metaflow-stubs/plugins/perimeters.pyi +1 -1
  209. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  210. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  211. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  214. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  215. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  217. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  218. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  219. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  220. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  221. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  222. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  223. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  225. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  226. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  227. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  228. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  229. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  230. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  231. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  232. metaflow-stubs/profilers/__init__.pyi +1 -1
  233. metaflow-stubs/pylint_wrapper.pyi +1 -1
  234. metaflow-stubs/runner/__init__.pyi +1 -1
  235. metaflow-stubs/runner/deployer.pyi +29 -29
  236. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  237. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  238. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  239. metaflow-stubs/runner/nbrun.pyi +1 -1
  240. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  241. metaflow-stubs/runner/utils.pyi +3 -3
  242. metaflow-stubs/system/__init__.pyi +1 -1
  243. metaflow-stubs/system/system_logger.pyi +1 -1
  244. metaflow-stubs/system/system_monitor.pyi +1 -1
  245. metaflow-stubs/tagging_util.pyi +1 -1
  246. metaflow-stubs/tuple_util.pyi +1 -1
  247. metaflow-stubs/user_configs/__init__.pyi +1 -1
  248. metaflow-stubs/user_configs/config_options.pyi +3 -3
  249. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  250. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  251. metaflow-stubs/user_decorators/common.pyi +1 -1
  252. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  253. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  254. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  255. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  256. {ob_metaflow_stubs-6.0.4.1rc0.dist-info → ob_metaflow_stubs-6.0.4.1rc1.dist-info}/METADATA +1 -1
  257. ob_metaflow_stubs-6.0.4.1rc1.dist-info/RECORD +260 -0
  258. ob_metaflow_stubs-6.0.4.1rc0.dist-info/RECORD +0 -260
  259. {ob_metaflow_stubs-6.0.4.1rc0.dist-info → ob_metaflow_stubs-6.0.4.1rc1.dist-info}/WHEEL +0 -0
  260. {ob_metaflow_stubs-6.0.4.1rc0.dist-info → ob_metaflow_stubs-6.0.4.1rc1.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.16.0.1+obcheckpoint(0.2.4);ob(v1) #
4
- # Generated on 2025-07-14T20:03:25.730478 #
4
+ # Generated on 2025-07-14T20:15:55.146353 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import cards as cards
43
42
  from . import tuple_util as tuple_util
44
- from . import events as events
43
+ from . import cards as cards
45
44
  from . import metaflow_git as metaflow_git
45
+ from . import events as events
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
51
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
53
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
@@ -162,272 +162,223 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
162
162
  """
163
163
  ...
164
164
 
165
- @typing.overload
166
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
165
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
167
166
  """
168
- Specifies that the step will success under all circumstances.
167
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
169
168
 
170
- The decorator will create an optional artifact, specified by `var`, which
171
- contains the exception raised. You can use it to detect the presence
172
- of errors, indicating that all happy-path artifacts produced by the step
173
- are missing.
169
+ > Examples
170
+
171
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
172
+ ```python
173
+ @huggingface_hub
174
+ @step
175
+ def pull_model_from_huggingface(self):
176
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
177
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
178
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
179
+ # value of the function is a reference to the model in the backend storage.
180
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
181
+
182
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
183
+ self.llama_model = current.huggingface_hub.snapshot_download(
184
+ repo_id=self.model_id,
185
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
186
+ )
187
+ self.next(self.train)
188
+ ```
189
+
190
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
191
+ ```python
192
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
193
+ @step
194
+ def pull_model_from_huggingface(self):
195
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
196
+ ```
197
+
198
+ ```python
199
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
200
+ @step
201
+ def finetune_model(self):
202
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
203
+ # path_to_model will be /my-directory
204
+ ```
205
+
206
+ ```python
207
+ # Takes all the arguments passed to `snapshot_download`
208
+ # except for `local_dir`
209
+ @huggingface_hub(load=[
210
+ {
211
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
212
+ },
213
+ {
214
+ "repo_id": "myorg/mistral-lora",
215
+ "repo_type": "model",
216
+ },
217
+ ])
218
+ @step
219
+ def finetune_model(self):
220
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
221
+ # path_to_model will be /my-directory
222
+ ```
174
223
 
175
224
 
176
225
  Parameters
177
226
  ----------
178
- var : str, optional, default None
179
- Name of the artifact in which to store the caught exception.
180
- If not specified, the exception is not stored.
181
- print_exception : bool, default True
182
- Determines whether or not the exception is printed to
183
- stdout when caught.
227
+ temp_dir_root : str, optional
228
+ The root directory that will hold the temporary directory where objects will be downloaded.
229
+
230
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
231
+ The list of repos (models/datasets) to load.
232
+
233
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
234
+
235
+ - If repo (model/dataset) is not found in the datastore:
236
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
237
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
238
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
239
+
240
+ - If repo is found in the datastore:
241
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
184
242
  """
185
243
  ...
186
244
 
187
245
  @typing.overload
188
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
189
- ...
190
-
191
- @typing.overload
192
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
193
- ...
194
-
195
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
246
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
196
247
  """
197
- Specifies that the step will success under all circumstances.
198
-
199
- The decorator will create an optional artifact, specified by `var`, which
200
- contains the exception raised. You can use it to detect the presence
201
- of errors, indicating that all happy-path artifacts produced by the step
202
- are missing.
248
+ Specifies environment variables to be set prior to the execution of a step.
203
249
 
204
250
 
205
251
  Parameters
206
252
  ----------
207
- var : str, optional, default None
208
- Name of the artifact in which to store the caught exception.
209
- If not specified, the exception is not stored.
210
- print_exception : bool, default True
211
- Determines whether or not the exception is printed to
212
- stdout when caught.
253
+ vars : Dict[str, str], default {}
254
+ Dictionary of environment variables to set.
213
255
  """
214
256
  ...
215
257
 
216
258
  @typing.overload
217
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
218
- """
219
- Decorator prototype for all step decorators. This function gets specialized
220
- and imported for all decorators types by _import_plugin_decorators().
221
- """
259
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
222
260
  ...
223
261
 
224
262
  @typing.overload
225
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
263
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
226
264
  ...
227
265
 
228
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
266
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
229
267
  """
230
- Decorator prototype for all step decorators. This function gets specialized
231
- and imported for all decorators types by _import_plugin_decorators().
268
+ Specifies environment variables to be set prior to the execution of a step.
269
+
270
+
271
+ Parameters
272
+ ----------
273
+ vars : Dict[str, str], default {}
274
+ Dictionary of environment variables to set.
232
275
  """
233
276
  ...
234
277
 
235
278
  @typing.overload
236
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
279
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
237
280
  """
238
- Specifies the number of times the task corresponding
239
- to a step needs to be retried.
240
-
241
- This decorator is useful for handling transient errors, such as networking issues.
242
- If your task contains operations that can't be retried safely, e.g. database updates,
243
- it is advisable to annotate it with `@retry(times=0)`.
281
+ Specifies the Conda environment for the step.
244
282
 
245
- This can be used in conjunction with the `@catch` decorator. The `@catch`
246
- decorator will execute a no-op task after all retries have been exhausted,
247
- ensuring that the flow execution can continue.
283
+ Information in this decorator will augment any
284
+ attributes set in the `@conda_base` flow-level decorator. Hence,
285
+ you can use `@conda_base` to set packages required by all
286
+ steps and use `@conda` to specify step-specific overrides.
248
287
 
249
288
 
250
289
  Parameters
251
290
  ----------
252
- times : int, default 3
253
- Number of times to retry this task.
254
- minutes_between_retries : int, default 2
255
- Number of minutes between retries.
291
+ packages : Dict[str, str], default {}
292
+ Packages to use for this step. The key is the name of the package
293
+ and the value is the version to use.
294
+ libraries : Dict[str, str], default {}
295
+ Supported for backward compatibility. When used with packages, packages will take precedence.
296
+ python : str, optional, default None
297
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
298
+ that the version used will correspond to the version of the Python interpreter used to start the run.
299
+ disabled : bool, default False
300
+ If set to True, disables @conda.
256
301
  """
257
302
  ...
258
303
 
259
304
  @typing.overload
260
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
305
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
261
306
  ...
262
307
 
263
308
  @typing.overload
264
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
309
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
265
310
  ...
266
311
 
267
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
312
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
268
313
  """
269
- Specifies the number of times the task corresponding
270
- to a step needs to be retried.
271
-
272
- This decorator is useful for handling transient errors, such as networking issues.
273
- If your task contains operations that can't be retried safely, e.g. database updates,
274
- it is advisable to annotate it with `@retry(times=0)`.
314
+ Specifies the Conda environment for the step.
275
315
 
276
- This can be used in conjunction with the `@catch` decorator. The `@catch`
277
- decorator will execute a no-op task after all retries have been exhausted,
278
- ensuring that the flow execution can continue.
316
+ Information in this decorator will augment any
317
+ attributes set in the `@conda_base` flow-level decorator. Hence,
318
+ you can use `@conda_base` to set packages required by all
319
+ steps and use `@conda` to specify step-specific overrides.
279
320
 
280
321
 
281
322
  Parameters
282
323
  ----------
283
- times : int, default 3
284
- Number of times to retry this task.
285
- minutes_between_retries : int, default 2
286
- Number of minutes between retries.
324
+ packages : Dict[str, str], default {}
325
+ Packages to use for this step. The key is the name of the package
326
+ and the value is the version to use.
327
+ libraries : Dict[str, str], default {}
328
+ Supported for backward compatibility. When used with packages, packages will take precedence.
329
+ python : str, optional, default None
330
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
331
+ that the version used will correspond to the version of the Python interpreter used to start the run.
332
+ disabled : bool, default False
333
+ If set to True, disables @conda.
287
334
  """
288
335
  ...
289
336
 
290
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
337
+ @typing.overload
338
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
291
339
  """
292
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
340
+ Enables loading / saving of models within a step.
293
341
 
294
- User code call
295
- --------------
296
- @ollama(
297
- models=[...],
298
- ...
299
- )
342
+ > Examples
343
+ - Saving Models
344
+ ```python
345
+ @model
346
+ @step
347
+ def train(self):
348
+ # current.model.save returns a dictionary reference to the model saved
349
+ self.my_model = current.model.save(
350
+ path_to_my_model,
351
+ label="my_model",
352
+ metadata={
353
+ "epochs": 10,
354
+ "batch-size": 32,
355
+ "learning-rate": 0.001,
356
+ }
357
+ )
358
+ self.next(self.test)
300
359
 
301
- Valid backend options
302
- ---------------------
303
- - 'local': Run as a separate process on the local task machine.
304
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
305
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
360
+ @model(load="my_model")
361
+ @step
362
+ def test(self):
363
+ # `current.model.loaded` returns a dictionary of the loaded models
364
+ # where the key is the name of the artifact and the value is the path to the model
365
+ print(os.listdir(current.model.loaded["my_model"]))
366
+ self.next(self.end)
367
+ ```
306
368
 
307
- Valid model options
308
- -------------------
309
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
310
-
311
-
312
- Parameters
313
- ----------
314
- models: list[str]
315
- List of Ollama containers running models in sidecars.
316
- backend: str
317
- Determines where and how to run the Ollama process.
318
- force_pull: bool
319
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
320
- cache_update_policy: str
321
- Cache update policy: "auto", "force", or "never".
322
- force_cache_update: bool
323
- Simple override for "force" cache update policy.
324
- debug: bool
325
- Whether to turn on verbose debugging logs.
326
- circuit_breaker_config: dict
327
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
328
- timeout_config: dict
329
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
330
- """
331
- ...
332
-
333
- @typing.overload
334
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
335
- """
336
- Specifies secrets to be retrieved and injected as environment variables prior to
337
- the execution of a step.
338
-
339
-
340
- Parameters
341
- ----------
342
- sources : List[Union[str, Dict[str, Any]]], default: []
343
- List of secret specs, defining how the secrets are to be retrieved
344
- role : str, optional, default: None
345
- Role to use for fetching secrets
346
- """
347
- ...
348
-
349
- @typing.overload
350
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
351
- ...
352
-
353
- @typing.overload
354
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
355
- ...
356
-
357
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
358
- """
359
- Specifies secrets to be retrieved and injected as environment variables prior to
360
- the execution of a step.
361
-
362
-
363
- Parameters
364
- ----------
365
- sources : List[Union[str, Dict[str, Any]]], default: []
366
- List of secret specs, defining how the secrets are to be retrieved
367
- role : str, optional, default: None
368
- Role to use for fetching secrets
369
- """
370
- ...
371
-
372
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
373
- """
374
- Specifies that this step should execute on DGX cloud.
375
-
376
-
377
- Parameters
378
- ----------
379
- gpu : int
380
- Number of GPUs to use.
381
- gpu_type : str
382
- Type of Nvidia GPU to use.
383
- """
384
- ...
385
-
386
- @typing.overload
387
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
388
- """
389
- Enables loading / saving of models within a step.
390
-
391
- > Examples
392
- - Saving Models
393
- ```python
394
- @model
395
- @step
396
- def train(self):
397
- # current.model.save returns a dictionary reference to the model saved
398
- self.my_model = current.model.save(
399
- path_to_my_model,
400
- label="my_model",
401
- metadata={
402
- "epochs": 10,
403
- "batch-size": 32,
404
- "learning-rate": 0.001,
405
- }
406
- )
407
- self.next(self.test)
408
-
409
- @model(load="my_model")
410
- @step
411
- def test(self):
412
- # `current.model.loaded` returns a dictionary of the loaded models
413
- # where the key is the name of the artifact and the value is the path to the model
414
- print(os.listdir(current.model.loaded["my_model"]))
415
- self.next(self.end)
416
- ```
417
-
418
- - Loading models
419
- ```python
420
- @step
421
- def train(self):
422
- # current.model.load returns the path to the model loaded
423
- checkpoint_path = current.model.load(
424
- self.checkpoint_key,
425
- )
426
- model_path = current.model.load(
427
- self.model,
428
- )
429
- self.next(self.test)
430
- ```
369
+ - Loading models
370
+ ```python
371
+ @step
372
+ def train(self):
373
+ # current.model.load returns the path to the model loaded
374
+ checkpoint_path = current.model.load(
375
+ self.checkpoint_key,
376
+ )
377
+ model_path = current.model.load(
378
+ self.model,
379
+ )
380
+ self.next(self.test)
381
+ ```
431
382
 
432
383
 
433
384
  Parameters
@@ -512,331 +463,302 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
512
463
  """
513
464
  ...
514
465
 
515
- @typing.overload
516
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
466
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
517
467
  """
518
- Specifies a timeout for your step.
519
-
520
- This decorator is useful if this step may hang indefinitely.
521
-
522
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
523
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
524
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
525
-
526
- Note that all the values specified in parameters are added together so if you specify
527
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
468
+ Specifies that this step should execute on Kubernetes.
528
469
 
529
470
 
530
471
  Parameters
531
472
  ----------
532
- seconds : int, default 0
533
- Number of seconds to wait prior to timing out.
534
- minutes : int, default 0
535
- Number of minutes to wait prior to timing out.
536
- hours : int, default 0
537
- Number of hours to wait prior to timing out.
473
+ cpu : int, default 1
474
+ Number of CPUs required for this step. If `@resources` is
475
+ also present, the maximum value from all decorators is used.
476
+ memory : int, default 4096
477
+ Memory size (in MB) required for this step. If
478
+ `@resources` is also present, the maximum value from all decorators is
479
+ used.
480
+ disk : int, default 10240
481
+ Disk size (in MB) required for this step. If
482
+ `@resources` is also present, the maximum value from all decorators is
483
+ used.
484
+ image : str, optional, default None
485
+ Docker image to use when launching on Kubernetes. If not specified, and
486
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
487
+ not, a default Docker image mapping to the current version of Python is used.
488
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
489
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
490
+ image_pull_secrets: List[str], default []
491
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
492
+ Kubernetes image pull secrets to use when pulling container images
493
+ in Kubernetes.
494
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
495
+ Kubernetes service account to use when launching pod in Kubernetes.
496
+ secrets : List[str], optional, default None
497
+ Kubernetes secrets to use when launching pod in Kubernetes. These
498
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
499
+ in Metaflow configuration.
500
+ node_selector: Union[Dict[str,str], str], optional, default None
501
+ Kubernetes node selector(s) to apply to the pod running the task.
502
+ Can be passed in as a comma separated string of values e.g.
503
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
504
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
505
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
506
+ Kubernetes namespace to use when launching pod in Kubernetes.
507
+ gpu : int, optional, default None
508
+ Number of GPUs required for this step. A value of zero implies that
509
+ the scheduled node should not have GPUs.
510
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
511
+ The vendor of the GPUs to be used for this step.
512
+ tolerations : List[str], default []
513
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
514
+ Kubernetes tolerations to use when launching pod in Kubernetes.
515
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
516
+ Kubernetes labels to use when launching pod in Kubernetes.
517
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
518
+ Kubernetes annotations to use when launching pod in Kubernetes.
519
+ use_tmpfs : bool, default False
520
+ This enables an explicit tmpfs mount for this step.
521
+ tmpfs_tempdir : bool, default True
522
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
523
+ tmpfs_size : int, optional, default: None
524
+ The value for the size (in MiB) of the tmpfs mount for this step.
525
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
526
+ memory allocated for this step.
527
+ tmpfs_path : str, optional, default /metaflow_temp
528
+ Path to tmpfs mount for this step.
529
+ persistent_volume_claims : Dict[str, str], optional, default None
530
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
531
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
532
+ shared_memory: int, optional
533
+ Shared memory size (in MiB) required for this step
534
+ port: int, optional
535
+ Port number to specify in the Kubernetes job object
536
+ compute_pool : str, optional, default None
537
+ Compute pool to be used for for this step.
538
+ If not specified, any accessible compute pool within the perimeter is used.
539
+ hostname_resolution_timeout: int, default 10 * 60
540
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
541
+ Only applicable when @parallel is used.
542
+ qos: str, default: Burstable
543
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
544
+
545
+ security_context: Dict[str, Any], optional, default None
546
+ Container security context. Applies to the task container. Allows the following keys:
547
+ - privileged: bool, optional, default None
548
+ - allow_privilege_escalation: bool, optional, default None
549
+ - run_as_user: int, optional, default None
550
+ - run_as_group: int, optional, default None
551
+ - run_as_non_root: bool, optional, default None
538
552
  """
539
553
  ...
540
554
 
541
- @typing.overload
542
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
543
- ...
544
-
545
- @typing.overload
546
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
547
- ...
548
-
549
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
555
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
550
556
  """
551
- Specifies a timeout for your step.
557
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
552
558
 
553
- This decorator is useful if this step may hang indefinitely.
559
+ User code call
560
+ --------------
561
+ @ollama(
562
+ models=[...],
563
+ ...
564
+ )
554
565
 
555
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
556
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
557
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
566
+ Valid backend options
567
+ ---------------------
568
+ - 'local': Run as a separate process on the local task machine.
569
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
570
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
558
571
 
559
- Note that all the values specified in parameters are added together so if you specify
560
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
572
+ Valid model options
573
+ -------------------
574
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
561
575
 
562
576
 
563
577
  Parameters
564
578
  ----------
565
- seconds : int, default 0
566
- Number of seconds to wait prior to timing out.
567
- minutes : int, default 0
568
- Number of minutes to wait prior to timing out.
569
- hours : int, default 0
570
- Number of hours to wait prior to timing out.
571
- """
572
- ...
573
-
574
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
575
- """
576
- Decorator that helps cache, version and store models/datasets from huggingface hub.
577
-
578
- > Examples
579
-
580
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
581
- ```python
582
- @huggingface_hub
583
- @step
584
- def pull_model_from_huggingface(self):
585
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
586
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
587
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
588
- # value of the function is a reference to the model in the backend storage.
589
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
590
-
591
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
592
- self.llama_model = current.huggingface_hub.snapshot_download(
593
- repo_id=self.model_id,
594
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
595
- )
596
- self.next(self.train)
597
- ```
598
-
599
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
600
- ```python
601
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
602
- @step
603
- def pull_model_from_huggingface(self):
604
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
605
- ```
606
-
607
- ```python
608
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
609
- @step
610
- def finetune_model(self):
611
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
612
- # path_to_model will be /my-directory
613
- ```
614
-
615
- ```python
616
- # Takes all the arguments passed to `snapshot_download`
617
- # except for `local_dir`
618
- @huggingface_hub(load=[
619
- {
620
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
621
- },
622
- {
623
- "repo_id": "myorg/mistral-lora",
624
- "repo_type": "model",
625
- },
626
- ])
627
- @step
628
- def finetune_model(self):
629
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
630
- # path_to_model will be /my-directory
631
- ```
632
-
633
-
634
- Parameters
635
- ----------
636
- temp_dir_root : str, optional
637
- The root directory that will hold the temporary directory where objects will be downloaded.
638
-
639
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
640
- The list of repos (models/datasets) to load.
641
-
642
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
643
-
644
- - If repo (model/dataset) is not found in the datastore:
645
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
646
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
647
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
648
-
649
- - If repo is found in the datastore:
650
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
579
+ models: list[str]
580
+ List of Ollama containers running models in sidecars.
581
+ backend: str
582
+ Determines where and how to run the Ollama process.
583
+ force_pull: bool
584
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
585
+ cache_update_policy: str
586
+ Cache update policy: "auto", "force", or "never".
587
+ force_cache_update: bool
588
+ Simple override for "force" cache update policy.
589
+ debug: bool
590
+ Whether to turn on verbose debugging logs.
591
+ circuit_breaker_config: dict
592
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
593
+ timeout_config: dict
594
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
651
595
  """
652
596
  ...
653
597
 
654
598
  @typing.overload
655
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
599
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
656
600
  """
657
- Specifies the Conda environment for the step.
658
-
659
- Information in this decorator will augment any
660
- attributes set in the `@conda_base` flow-level decorator. Hence,
661
- you can use `@conda_base` to set packages required by all
662
- steps and use `@conda` to specify step-specific overrides.
601
+ Specifies secrets to be retrieved and injected as environment variables prior to
602
+ the execution of a step.
663
603
 
664
604
 
665
605
  Parameters
666
606
  ----------
667
- packages : Dict[str, str], default {}
668
- Packages to use for this step. The key is the name of the package
669
- and the value is the version to use.
670
- libraries : Dict[str, str], default {}
671
- Supported for backward compatibility. When used with packages, packages will take precedence.
672
- python : str, optional, default None
673
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
674
- that the version used will correspond to the version of the Python interpreter used to start the run.
675
- disabled : bool, default False
676
- If set to True, disables @conda.
607
+ sources : List[Union[str, Dict[str, Any]]], default: []
608
+ List of secret specs, defining how the secrets are to be retrieved
609
+ role : str, optional, default: None
610
+ Role to use for fetching secrets
677
611
  """
678
612
  ...
679
613
 
680
614
  @typing.overload
681
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
615
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
682
616
  ...
683
617
 
684
618
  @typing.overload
685
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
619
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
686
620
  ...
687
621
 
688
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
622
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
689
623
  """
690
- Specifies the Conda environment for the step.
691
-
692
- Information in this decorator will augment any
693
- attributes set in the `@conda_base` flow-level decorator. Hence,
694
- you can use `@conda_base` to set packages required by all
695
- steps and use `@conda` to specify step-specific overrides.
624
+ Specifies secrets to be retrieved and injected as environment variables prior to
625
+ the execution of a step.
696
626
 
697
627
 
698
628
  Parameters
699
629
  ----------
700
- packages : Dict[str, str], default {}
701
- Packages to use for this step. The key is the name of the package
702
- and the value is the version to use.
703
- libraries : Dict[str, str], default {}
704
- Supported for backward compatibility. When used with packages, packages will take precedence.
705
- python : str, optional, default None
706
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
707
- that the version used will correspond to the version of the Python interpreter used to start the run.
708
- disabled : bool, default False
709
- If set to True, disables @conda.
630
+ sources : List[Union[str, Dict[str, Any]]], default: []
631
+ List of secret specs, defining how the secrets are to be retrieved
632
+ role : str, optional, default: None
633
+ Role to use for fetching secrets
710
634
  """
711
635
  ...
712
636
 
713
637
  @typing.overload
714
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
638
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
715
639
  """
716
- Specifies the resources needed when executing this step.
640
+ Specifies a timeout for your step.
717
641
 
718
- Use `@resources` to specify the resource requirements
719
- independently of the specific compute layer (`@batch`, `@kubernetes`).
642
+ This decorator is useful if this step may hang indefinitely.
720
643
 
721
- You can choose the compute layer on the command line by executing e.g.
722
- ```
723
- python myflow.py run --with batch
724
- ```
725
- or
726
- ```
727
- python myflow.py run --with kubernetes
728
- ```
729
- which executes the flow on the desired system using the
730
- requirements specified in `@resources`.
644
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
645
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
646
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
647
+
648
+ Note that all the values specified in parameters are added together so if you specify
649
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
731
650
 
732
651
 
733
652
  Parameters
734
653
  ----------
735
- cpu : int, default 1
736
- Number of CPUs required for this step.
737
- gpu : int, optional, default None
738
- Number of GPUs required for this step.
739
- disk : int, optional, default None
740
- Disk size (in MB) required for this step. Only applies on Kubernetes.
741
- memory : int, default 4096
742
- Memory size (in MB) required for this step.
743
- shared_memory : int, optional, default None
744
- The value for the size (in MiB) of the /dev/shm volume for this step.
745
- This parameter maps to the `--shm-size` option in Docker.
654
+ seconds : int, default 0
655
+ Number of seconds to wait prior to timing out.
656
+ minutes : int, default 0
657
+ Number of minutes to wait prior to timing out.
658
+ hours : int, default 0
659
+ Number of hours to wait prior to timing out.
746
660
  """
747
661
  ...
748
662
 
749
663
  @typing.overload
750
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
664
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
751
665
  ...
752
666
 
753
667
  @typing.overload
754
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
668
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
755
669
  ...
756
670
 
757
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
671
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
758
672
  """
759
- Specifies the resources needed when executing this step.
673
+ Specifies a timeout for your step.
760
674
 
761
- Use `@resources` to specify the resource requirements
762
- independently of the specific compute layer (`@batch`, `@kubernetes`).
675
+ This decorator is useful if this step may hang indefinitely.
763
676
 
764
- You can choose the compute layer on the command line by executing e.g.
765
- ```
766
- python myflow.py run --with batch
767
- ```
768
- or
769
- ```
770
- python myflow.py run --with kubernetes
771
- ```
772
- which executes the flow on the desired system using the
773
- requirements specified in `@resources`.
677
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
678
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
679
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
680
+
681
+ Note that all the values specified in parameters are added together so if you specify
682
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
774
683
 
775
684
 
776
685
  Parameters
777
686
  ----------
778
- cpu : int, default 1
779
- Number of CPUs required for this step.
780
- gpu : int, optional, default None
781
- Number of GPUs required for this step.
782
- disk : int, optional, default None
783
- Disk size (in MB) required for this step. Only applies on Kubernetes.
784
- memory : int, default 4096
785
- Memory size (in MB) required for this step.
786
- shared_memory : int, optional, default None
787
- The value for the size (in MiB) of the /dev/shm volume for this step.
788
- This parameter maps to the `--shm-size` option in Docker.
687
+ seconds : int, default 0
688
+ Number of seconds to wait prior to timing out.
689
+ minutes : int, default 0
690
+ Number of minutes to wait prior to timing out.
691
+ hours : int, default 0
692
+ Number of hours to wait prior to timing out.
789
693
  """
790
694
  ...
791
695
 
792
696
  @typing.overload
793
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
697
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
794
698
  """
795
- Specifies the PyPI packages for the step.
796
-
797
- Information in this decorator will augment any
798
- attributes set in the `@pyi_base` flow-level decorator. Hence,
799
- you can use `@pypi_base` to set packages required by all
800
- steps and use `@pypi` to specify step-specific overrides.
801
-
802
-
803
- Parameters
804
- ----------
805
- packages : Dict[str, str], default: {}
806
- Packages to use for this step. The key is the name of the package
807
- and the value is the version to use.
808
- python : str, optional, default: None
809
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
810
- that the version used will correspond to the version of the Python interpreter used to start the run.
699
+ Decorator prototype for all step decorators. This function gets specialized
700
+ and imported for all decorators types by _import_plugin_decorators().
811
701
  """
812
702
  ...
813
703
 
814
704
  @typing.overload
815
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
705
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
816
706
  ...
817
707
 
818
- @typing.overload
819
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
708
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
709
+ """
710
+ Decorator prototype for all step decorators. This function gets specialized
711
+ and imported for all decorators types by _import_plugin_decorators().
712
+ """
820
713
  ...
821
714
 
822
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
715
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
823
716
  """
824
- Specifies the PyPI packages for the step.
717
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
825
718
 
826
- Information in this decorator will augment any
827
- attributes set in the `@pyi_base` flow-level decorator. Hence,
828
- you can use `@pypi_base` to set packages required by all
829
- steps and use `@pypi` to specify step-specific overrides.
719
+ User code call
720
+ --------------
721
+ @vllm(
722
+ model="...",
723
+ ...
724
+ )
725
+
726
+ Valid backend options
727
+ ---------------------
728
+ - 'local': Run as a separate process on the local task machine.
729
+
730
+ Valid model options
731
+ -------------------
732
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
733
+
734
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
735
+ If you need multiple models, you must create multiple @vllm decorators.
830
736
 
831
737
 
832
738
  Parameters
833
739
  ----------
834
- packages : Dict[str, str], default: {}
835
- Packages to use for this step. The key is the name of the package
836
- and the value is the version to use.
837
- python : str, optional, default: None
838
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
839
- that the version used will correspond to the version of the Python interpreter used to start the run.
740
+ model: str
741
+ HuggingFace model identifier to be served by vLLM.
742
+ backend: str
743
+ Determines where and how to run the vLLM process.
744
+ openai_api_server: bool
745
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
746
+ Default is False (uses native engine).
747
+ Set to True for backward compatibility with existing code.
748
+ debug: bool
749
+ Whether to turn on verbose debugging logs.
750
+ card_refresh_interval: int
751
+ Interval in seconds for refreshing the vLLM status card.
752
+ Only used when openai_api_server=True.
753
+ max_retries: int
754
+ Maximum number of retries checking for vLLM server startup.
755
+ Only used when openai_api_server=True.
756
+ retry_alert_frequency: int
757
+ Frequency of alert logs for vLLM server startup retries.
758
+ Only used when openai_api_server=True.
759
+ engine_args : dict
760
+ Additional keyword arguments to pass to the vLLM engine.
761
+ For example, `tensor_parallel_size=2`.
840
762
  """
841
763
  ...
842
764
 
@@ -889,92 +811,74 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
889
811
  """
890
812
  ...
891
813
 
892
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
814
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
893
815
  """
894
- Specifies that this step should execute on Kubernetes.
816
+ Specifies that this step should execute on DGX cloud.
895
817
 
896
818
 
897
819
  Parameters
898
820
  ----------
899
- cpu : int, default 1
900
- Number of CPUs required for this step. If `@resources` is
901
- also present, the maximum value from all decorators is used.
902
- memory : int, default 4096
903
- Memory size (in MB) required for this step. If
904
- `@resources` is also present, the maximum value from all decorators is
905
- used.
906
- disk : int, default 10240
907
- Disk size (in MB) required for this step. If
908
- `@resources` is also present, the maximum value from all decorators is
909
- used.
910
- image : str, optional, default None
911
- Docker image to use when launching on Kubernetes. If not specified, and
912
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
913
- not, a default Docker image mapping to the current version of Python is used.
914
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
915
- If given, the imagePullPolicy to be applied to the Docker image of the step.
916
- image_pull_secrets: List[str], default []
917
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
918
- Kubernetes image pull secrets to use when pulling container images
919
- in Kubernetes.
920
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
921
- Kubernetes service account to use when launching pod in Kubernetes.
922
- secrets : List[str], optional, default None
923
- Kubernetes secrets to use when launching pod in Kubernetes. These
924
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
925
- in Metaflow configuration.
926
- node_selector: Union[Dict[str,str], str], optional, default None
927
- Kubernetes node selector(s) to apply to the pod running the task.
928
- Can be passed in as a comma separated string of values e.g.
929
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
930
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
931
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
932
- Kubernetes namespace to use when launching pod in Kubernetes.
933
- gpu : int, optional, default None
934
- Number of GPUs required for this step. A value of zero implies that
935
- the scheduled node should not have GPUs.
936
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
937
- The vendor of the GPUs to be used for this step.
938
- tolerations : List[str], default []
939
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
940
- Kubernetes tolerations to use when launching pod in Kubernetes.
941
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
942
- Kubernetes labels to use when launching pod in Kubernetes.
943
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
944
- Kubernetes annotations to use when launching pod in Kubernetes.
945
- use_tmpfs : bool, default False
946
- This enables an explicit tmpfs mount for this step.
947
- tmpfs_tempdir : bool, default True
948
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
949
- tmpfs_size : int, optional, default: None
950
- The value for the size (in MiB) of the tmpfs mount for this step.
951
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
952
- memory allocated for this step.
953
- tmpfs_path : str, optional, default /metaflow_temp
954
- Path to tmpfs mount for this step.
955
- persistent_volume_claims : Dict[str, str], optional, default None
956
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
957
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
958
- shared_memory: int, optional
959
- Shared memory size (in MiB) required for this step
960
- port: int, optional
961
- Port number to specify in the Kubernetes job object
962
- compute_pool : str, optional, default None
963
- Compute pool to be used for for this step.
964
- If not specified, any accessible compute pool within the perimeter is used.
965
- hostname_resolution_timeout: int, default 10 * 60
966
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
967
- Only applicable when @parallel is used.
968
- qos: str, default: Burstable
969
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
821
+ gpu : int
822
+ Number of GPUs to use.
823
+ gpu_type : str
824
+ Type of Nvidia GPU to use.
825
+ queue_timeout : int
826
+ Time to keep the job in NVCF's queue.
827
+ """
828
+ ...
829
+
830
+ @typing.overload
831
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
832
+ """
833
+ Specifies the number of times the task corresponding
834
+ to a step needs to be retried.
970
835
 
971
- security_context: Dict[str, Any], optional, default None
972
- Container security context. Applies to the task container. Allows the following keys:
973
- - privileged: bool, optional, default None
974
- - allow_privilege_escalation: bool, optional, default None
975
- - run_as_user: int, optional, default None
976
- - run_as_group: int, optional, default None
977
- - run_as_non_root: bool, optional, default None
836
+ This decorator is useful for handling transient errors, such as networking issues.
837
+ If your task contains operations that can't be retried safely, e.g. database updates,
838
+ it is advisable to annotate it with `@retry(times=0)`.
839
+
840
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
841
+ decorator will execute a no-op task after all retries have been exhausted,
842
+ ensuring that the flow execution can continue.
843
+
844
+
845
+ Parameters
846
+ ----------
847
+ times : int, default 3
848
+ Number of times to retry this task.
849
+ minutes_between_retries : int, default 2
850
+ Number of minutes between retries.
851
+ """
852
+ ...
853
+
854
+ @typing.overload
855
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
856
+ ...
857
+
858
+ @typing.overload
859
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
860
+ ...
861
+
862
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
863
+ """
864
+ Specifies the number of times the task corresponding
865
+ to a step needs to be retried.
866
+
867
+ This decorator is useful for handling transient errors, such as networking issues.
868
+ If your task contains operations that can't be retried safely, e.g. database updates,
869
+ it is advisable to annotate it with `@retry(times=0)`.
870
+
871
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
872
+ decorator will execute a no-op task after all retries have been exhausted,
873
+ ensuring that the flow execution can continue.
874
+
875
+
876
+ Parameters
877
+ ----------
878
+ times : int, default 3
879
+ Number of times to retry this task.
880
+ minutes_between_retries : int, default 2
881
+ Number of minutes between retries.
978
882
  """
979
883
  ...
980
884
 
@@ -998,56 +902,136 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
998
902
  ...
999
903
 
1000
904
  @typing.overload
1001
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
905
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1002
906
  """
1003
- Internal decorator to support Fast bakery
907
+ Specifies the resources needed when executing this step.
908
+
909
+ Use `@resources` to specify the resource requirements
910
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
911
+
912
+ You can choose the compute layer on the command line by executing e.g.
913
+ ```
914
+ python myflow.py run --with batch
915
+ ```
916
+ or
917
+ ```
918
+ python myflow.py run --with kubernetes
919
+ ```
920
+ which executes the flow on the desired system using the
921
+ requirements specified in `@resources`.
922
+
923
+
924
+ Parameters
925
+ ----------
926
+ cpu : int, default 1
927
+ Number of CPUs required for this step.
928
+ gpu : int, optional, default None
929
+ Number of GPUs required for this step.
930
+ disk : int, optional, default None
931
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
932
+ memory : int, default 4096
933
+ Memory size (in MB) required for this step.
934
+ shared_memory : int, optional, default None
935
+ The value for the size (in MiB) of the /dev/shm volume for this step.
936
+ This parameter maps to the `--shm-size` option in Docker.
1004
937
  """
1005
938
  ...
1006
939
 
1007
940
  @typing.overload
1008
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1009
942
  ...
1010
943
 
1011
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
944
+ @typing.overload
945
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
946
+ ...
947
+
948
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1012
949
  """
1013
- Internal decorator to support Fast bakery
950
+ Specifies the resources needed when executing this step.
951
+
952
+ Use `@resources` to specify the resource requirements
953
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
954
+
955
+ You can choose the compute layer on the command line by executing e.g.
956
+ ```
957
+ python myflow.py run --with batch
958
+ ```
959
+ or
960
+ ```
961
+ python myflow.py run --with kubernetes
962
+ ```
963
+ which executes the flow on the desired system using the
964
+ requirements specified in `@resources`.
965
+
966
+
967
+ Parameters
968
+ ----------
969
+ cpu : int, default 1
970
+ Number of CPUs required for this step.
971
+ gpu : int, optional, default None
972
+ Number of GPUs required for this step.
973
+ disk : int, optional, default None
974
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
975
+ memory : int, default 4096
976
+ Memory size (in MB) required for this step.
977
+ shared_memory : int, optional, default None
978
+ The value for the size (in MiB) of the /dev/shm volume for this step.
979
+ This parameter maps to the `--shm-size` option in Docker.
1014
980
  """
1015
981
  ...
1016
982
 
1017
983
  @typing.overload
1018
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
984
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1019
985
  """
1020
- Specifies environment variables to be set prior to the execution of a step.
986
+ Specifies that the step will success under all circumstances.
987
+
988
+ The decorator will create an optional artifact, specified by `var`, which
989
+ contains the exception raised. You can use it to detect the presence
990
+ of errors, indicating that all happy-path artifacts produced by the step
991
+ are missing.
1021
992
 
1022
993
 
1023
994
  Parameters
1024
995
  ----------
1025
- vars : Dict[str, str], default {}
1026
- Dictionary of environment variables to set.
996
+ var : str, optional, default None
997
+ Name of the artifact in which to store the caught exception.
998
+ If not specified, the exception is not stored.
999
+ print_exception : bool, default True
1000
+ Determines whether or not the exception is printed to
1001
+ stdout when caught.
1027
1002
  """
1028
1003
  ...
1029
1004
 
1030
1005
  @typing.overload
1031
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1006
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1032
1007
  ...
1033
1008
 
1034
1009
  @typing.overload
1035
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1010
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1036
1011
  ...
1037
1012
 
1038
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1013
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1039
1014
  """
1040
- Specifies environment variables to be set prior to the execution of a step.
1015
+ Specifies that the step will success under all circumstances.
1016
+
1017
+ The decorator will create an optional artifact, specified by `var`, which
1018
+ contains the exception raised. You can use it to detect the presence
1019
+ of errors, indicating that all happy-path artifacts produced by the step
1020
+ are missing.
1041
1021
 
1042
1022
 
1043
1023
  Parameters
1044
1024
  ----------
1045
- vars : Dict[str, str], default {}
1046
- Dictionary of environment variables to set.
1025
+ var : str, optional, default None
1026
+ Name of the artifact in which to store the caught exception.
1027
+ If not specified, the exception is not stored.
1028
+ print_exception : bool, default True
1029
+ Determines whether or not the exception is printed to
1030
+ stdout when caught.
1047
1031
  """
1048
1032
  ...
1049
1033
 
1050
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1034
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1051
1035
  """
1052
1036
  Specifies that this step should execute on DGX cloud.
1053
1037
 
@@ -1058,8 +1042,57 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
1058
1042
  Number of GPUs to use.
1059
1043
  gpu_type : str
1060
1044
  Type of Nvidia GPU to use.
1061
- queue_timeout : int
1062
- Time to keep the job in NVCF's queue.
1045
+ """
1046
+ ...
1047
+
1048
+ @typing.overload
1049
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1050
+ """
1051
+ Specifies the PyPI packages for the step.
1052
+
1053
+ Information in this decorator will augment any
1054
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1055
+ you can use `@pypi_base` to set packages required by all
1056
+ steps and use `@pypi` to specify step-specific overrides.
1057
+
1058
+
1059
+ Parameters
1060
+ ----------
1061
+ packages : Dict[str, str], default: {}
1062
+ Packages to use for this step. The key is the name of the package
1063
+ and the value is the version to use.
1064
+ python : str, optional, default: None
1065
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1066
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1067
+ """
1068
+ ...
1069
+
1070
+ @typing.overload
1071
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1072
+ ...
1073
+
1074
+ @typing.overload
1075
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1076
+ ...
1077
+
1078
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1079
+ """
1080
+ Specifies the PyPI packages for the step.
1081
+
1082
+ Information in this decorator will augment any
1083
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1084
+ you can use `@pypi_base` to set packages required by all
1085
+ steps and use `@pypi` to specify step-specific overrides.
1086
+
1087
+
1088
+ Parameters
1089
+ ----------
1090
+ packages : Dict[str, str], default: {}
1091
+ Packages to use for this step. The key is the name of the package
1092
+ and the value is the version to use.
1093
+ python : str, optional, default: None
1094
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1095
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1063
1096
  """
1064
1097
  ...
1065
1098
 
@@ -1210,88 +1243,63 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
1210
1243
  """
1211
1244
  ...
1212
1245
 
1213
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1246
+ @typing.overload
1247
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1214
1248
  """
1215
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1216
-
1217
- User code call
1218
- --------------
1219
- @vllm(
1220
- model="...",
1221
- ...
1222
- )
1223
-
1224
- Valid backend options
1225
- ---------------------
1226
- - 'local': Run as a separate process on the local task machine.
1227
-
1228
- Valid model options
1229
- -------------------
1230
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1231
-
1232
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1233
- If you need multiple models, you must create multiple @vllm decorators.
1234
-
1235
-
1236
- Parameters
1237
- ----------
1238
- model: str
1239
- HuggingFace model identifier to be served by vLLM.
1240
- backend: str
1241
- Determines where and how to run the vLLM process.
1242
- openai_api_server: bool
1243
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1244
- Default is False (uses native engine).
1245
- Set to True for backward compatibility with existing code.
1246
- debug: bool
1247
- Whether to turn on verbose debugging logs.
1248
- card_refresh_interval: int
1249
- Interval in seconds for refreshing the vLLM status card.
1250
- Only used when openai_api_server=True.
1251
- max_retries: int
1252
- Maximum number of retries checking for vLLM server startup.
1253
- Only used when openai_api_server=True.
1254
- retry_alert_frequency: int
1255
- Frequency of alert logs for vLLM server startup retries.
1256
- Only used when openai_api_server=True.
1257
- engine_args : dict
1258
- Additional keyword arguments to pass to the vLLM engine.
1259
- For example, `tensor_parallel_size=2`.
1249
+ Internal decorator to support Fast bakery
1260
1250
  """
1261
1251
  ...
1262
1252
 
1263
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1253
+ @typing.overload
1254
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1255
+ ...
1256
+
1257
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1264
1258
  """
1265
- Specifies what flows belong to the same project.
1266
-
1267
- A project-specific namespace is created for all flows that
1268
- use the same `@project(name)`.
1259
+ Internal decorator to support Fast bakery
1260
+ """
1261
+ ...
1262
+
1263
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1264
+ """
1265
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1266
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1267
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1268
+ added as flow decorators. Adding more than one decorator will ensure that `start` step
1269
+ starts only after all sensors finish.
1269
1270
 
1270
1271
 
1271
1272
  Parameters
1272
1273
  ----------
1274
+ timeout : int
1275
+ Time, in seconds before the task times out and fails. (Default: 3600)
1276
+ poke_interval : int
1277
+ Time in seconds that the job should wait in between each try. (Default: 60)
1278
+ mode : str
1279
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1280
+ exponential_backoff : bool
1281
+ allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1282
+ pool : str
1283
+ the slot pool this task should run in,
1284
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1285
+ soft_fail : bool
1286
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1273
1287
  name : str
1274
- Project name. Make sure that the name is unique amongst all
1275
- projects that use the same production scheduler. The name may
1276
- contain only lowercase alphanumeric characters and underscores.
1277
-
1278
- branch : Optional[str], default None
1279
- The branch to use. If not specified, the branch is set to
1280
- `user.<username>` unless `production` is set to `True`. This can
1281
- also be set on the command line using `--branch` as a top-level option.
1282
- It is an error to specify `branch` in the decorator and on the command line.
1283
-
1284
- production : bool, default False
1285
- Whether or not the branch is the production branch. This can also be set on the
1286
- command line using `--production` as a top-level option. It is an error to specify
1287
- `production` in the decorator and on the command line.
1288
- The project branch name will be:
1289
- - if `branch` is specified:
1290
- - if `production` is True: `prod.<branch>`
1291
- - if `production` is False: `test.<branch>`
1292
- - if `branch` is not specified:
1293
- - if `production` is True: `prod`
1294
- - if `production` is False: `user.<username>`
1288
+ Name of the sensor on Airflow
1289
+ description : str
1290
+ Description of sensor in the Airflow UI
1291
+ bucket_key : Union[str, List[str]]
1292
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1293
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1294
+ bucket_name : str
1295
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1296
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default:None)
1297
+ wildcard_match : bool
1298
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1299
+ aws_conn_id : str
1300
+ a reference to the s3 connection on Airflow. (Default: None)
1301
+ verify : bool
1302
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1295
1303
  """
1296
1304
  ...
1297
1305
 
@@ -1397,53 +1405,53 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1397
1405
  ...
1398
1406
 
1399
1407
  @typing.overload
1400
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1408
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1401
1409
  """
1402
- Specifies the times when the flow should be run when running on a
1403
- production scheduler.
1410
+ Specifies the Conda environment for all steps of the flow.
1411
+
1412
+ Use `@conda_base` to set common libraries required by all
1413
+ steps and use `@conda` to specify step-specific additions.
1404
1414
 
1405
1415
 
1406
1416
  Parameters
1407
1417
  ----------
1408
- hourly : bool, default False
1409
- Run the workflow hourly.
1410
- daily : bool, default True
1411
- Run the workflow daily.
1412
- weekly : bool, default False
1413
- Run the workflow weekly.
1414
- cron : str, optional, default None
1415
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1416
- specified by this expression.
1417
- timezone : str, optional, default None
1418
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1419
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1418
+ packages : Dict[str, str], default {}
1419
+ Packages to use for this flow. The key is the name of the package
1420
+ and the value is the version to use.
1421
+ libraries : Dict[str, str], default {}
1422
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1423
+ python : str, optional, default None
1424
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1425
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1426
+ disabled : bool, default False
1427
+ If set to True, disables Conda.
1420
1428
  """
1421
1429
  ...
1422
1430
 
1423
1431
  @typing.overload
1424
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1432
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1425
1433
  ...
1426
1434
 
1427
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1435
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1428
1436
  """
1429
- Specifies the times when the flow should be run when running on a
1430
- production scheduler.
1437
+ Specifies the Conda environment for all steps of the flow.
1438
+
1439
+ Use `@conda_base` to set common libraries required by all
1440
+ steps and use `@conda` to specify step-specific additions.
1431
1441
 
1432
1442
 
1433
1443
  Parameters
1434
1444
  ----------
1435
- hourly : bool, default False
1436
- Run the workflow hourly.
1437
- daily : bool, default True
1438
- Run the workflow daily.
1439
- weekly : bool, default False
1440
- Run the workflow weekly.
1441
- cron : str, optional, default None
1442
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1443
- specified by this expression.
1444
- timezone : str, optional, default None
1445
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1446
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1445
+ packages : Dict[str, str], default {}
1446
+ Packages to use for this flow. The key is the name of the package
1447
+ and the value is the version to use.
1448
+ libraries : Dict[str, str], default {}
1449
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1450
+ python : str, optional, default None
1451
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1452
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1453
+ disabled : bool, default False
1454
+ If set to True, disables Conda.
1447
1455
  """
1448
1456
  ...
1449
1457
 
@@ -1654,6 +1662,57 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1654
1662
  """
1655
1663
  ...
1656
1664
 
1665
+ @typing.overload
1666
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1667
+ """
1668
+ Specifies the times when the flow should be run when running on a
1669
+ production scheduler.
1670
+
1671
+
1672
+ Parameters
1673
+ ----------
1674
+ hourly : bool, default False
1675
+ Run the workflow hourly.
1676
+ daily : bool, default True
1677
+ Run the workflow daily.
1678
+ weekly : bool, default False
1679
+ Run the workflow weekly.
1680
+ cron : str, optional, default None
1681
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1682
+ specified by this expression.
1683
+ timezone : str, optional, default None
1684
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1685
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1686
+ """
1687
+ ...
1688
+
1689
+ @typing.overload
1690
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1691
+ ...
1692
+
1693
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1694
+ """
1695
+ Specifies the times when the flow should be run when running on a
1696
+ production scheduler.
1697
+
1698
+
1699
+ Parameters
1700
+ ----------
1701
+ hourly : bool, default False
1702
+ Run the workflow hourly.
1703
+ daily : bool, default True
1704
+ Run the workflow daily.
1705
+ weekly : bool, default False
1706
+ Run the workflow weekly.
1707
+ cron : str, optional, default None
1708
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1709
+ specified by this expression.
1710
+ timezone : str, optional, default None
1711
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1712
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1713
+ """
1714
+ ...
1715
+
1657
1716
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1658
1717
  """
1659
1718
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1697,54 +1756,38 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1697
1756
  """
1698
1757
  ...
1699
1758
 
1700
- @typing.overload
1701
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1759
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1702
1760
  """
1703
- Specifies the Conda environment for all steps of the flow.
1761
+ Specifies what flows belong to the same project.
1704
1762
 
1705
- Use `@conda_base` to set common libraries required by all
1706
- steps and use `@conda` to specify step-specific additions.
1763
+ A project-specific namespace is created for all flows that
1764
+ use the same `@project(name)`.
1707
1765
 
1708
1766
 
1709
1767
  Parameters
1710
1768
  ----------
1711
- packages : Dict[str, str], default {}
1712
- Packages to use for this flow. The key is the name of the package
1713
- and the value is the version to use.
1714
- libraries : Dict[str, str], default {}
1715
- Supported for backward compatibility. When used with packages, packages will take precedence.
1716
- python : str, optional, default None
1717
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1718
- that the version used will correspond to the version of the Python interpreter used to start the run.
1719
- disabled : bool, default False
1720
- If set to True, disables Conda.
1721
- """
1722
- ...
1723
-
1724
- @typing.overload
1725
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1726
- ...
1727
-
1728
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1729
- """
1730
- Specifies the Conda environment for all steps of the flow.
1731
-
1732
- Use `@conda_base` to set common libraries required by all
1733
- steps and use `@conda` to specify step-specific additions.
1769
+ name : str
1770
+ Project name. Make sure that the name is unique amongst all
1771
+ projects that use the same production scheduler. The name may
1772
+ contain only lowercase alphanumeric characters and underscores.
1734
1773
 
1774
+ branch : Optional[str], default None
1775
+ The branch to use. If not specified, the branch is set to
1776
+ `user.<username>` unless `production` is set to `True`. This can
1777
+ also be set on the command line using `--branch` as a top-level option.
1778
+ It is an error to specify `branch` in the decorator and on the command line.
1735
1779
 
1736
- Parameters
1737
- ----------
1738
- packages : Dict[str, str], default {}
1739
- Packages to use for this flow. The key is the name of the package
1740
- and the value is the version to use.
1741
- libraries : Dict[str, str], default {}
1742
- Supported for backward compatibility. When used with packages, packages will take precedence.
1743
- python : str, optional, default None
1744
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1745
- that the version used will correspond to the version of the Python interpreter used to start the run.
1746
- disabled : bool, default False
1747
- If set to True, disables Conda.
1780
+ production : bool, default False
1781
+ Whether or not the branch is the production branch. This can also be set on the
1782
+ command line using `--production` as a top-level option. It is an error to specify
1783
+ `production` in the decorator and on the command line.
1784
+ The project branch name will be:
1785
+ - if `branch` is specified:
1786
+ - if `production` is True: `prod.<branch>`
1787
+ - if `production` is False: `test.<branch>`
1788
+ - if `branch` is not specified:
1789
+ - if `production` is True: `prod`
1790
+ - if `production` is False: `user.<username>`
1748
1791
  """
1749
1792
  ...
1750
1793
 
@@ -1789,48 +1832,5 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1789
1832
  """
1790
1833
  ...
1791
1834
 
1792
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1793
- """
1794
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1795
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1796
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1797
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1798
- starts only after all sensors finish.
1799
-
1800
-
1801
- Parameters
1802
- ----------
1803
- timeout : int
1804
- Time, in seconds before the task times out and fails. (Default: 3600)
1805
- poke_interval : int
1806
- Time in seconds that the job should wait in between each try. (Default: 60)
1807
- mode : str
1808
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1809
- exponential_backoff : bool
1810
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1811
- pool : str
1812
- the slot pool this task should run in,
1813
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1814
- soft_fail : bool
1815
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1816
- name : str
1817
- Name of the sensor on Airflow
1818
- description : str
1819
- Description of sensor in the Airflow UI
1820
- bucket_key : Union[str, List[str]]
1821
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1822
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1823
- bucket_name : str
1824
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1825
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1826
- wildcard_match : bool
1827
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1828
- aws_conn_id : str
1829
- a reference to the s3 connection on Airflow. (Default: None)
1830
- verify : bool
1831
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1832
- """
1833
- ...
1834
-
1835
1835
  pkg_name: str
1836
1836