ob-metaflow-stubs 6.0.10.2rc0__py2.py3-none-any.whl → 6.0.10.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-stubs might be problematic.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +777 -731
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +92 -48
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +10 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +6 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +129 -14
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -3
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +4 -4
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -11
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +4 -4
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +4 -4
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +5 -5
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +4 -4
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -3
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -3
  118. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +12 -12
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +6 -6
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +2 -2
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +4 -4
  251. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  258. {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.4.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.10.2rc0.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.10.2rc0.dist-info → ob_metaflow_stubs-6.0.10.4.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.3.2+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-09-09T23:55:12.839647 #
+ # MF version: 2.18.5.1+obcheckpoint(0.2.6);ob(v1) #
+ # Generated on 2025-09-16T23:23:08.891416 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
@@ -48,9 +48,9 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -83,7 +83,6 @@ from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebi
  from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
  from .mf_extensions.outerbounds.plugins.aws.assume_role_decorator import assume_role as assume_role
  from .mf_extensions.outerbounds.plugins.apps.core.deployer import AppDeployer as AppDeployer
- from .mf_extensions.outerbounds.plugins.apps.core.deployer import DeployedApp as DeployedApp
  from . import system as system
  from . import cli_components as cli_components
  from . import pylint_wrapper as pylint_wrapper
@@ -169,65 +168,23 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like CoreWeave.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
- """
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
+ It exists to make it easier for users to know that this decorator should only be used with
+ a Neo Cloud like CoreWeave.
  """
  ...

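The hunk above swaps the relocated `@environment`/`@nvidia` stubs for bare `@coreweave_s3_proxy` overloads. As a rough, hypothetical illustration of how such an argument-less step decorator is applied (the flow and step names below are made up and not part of this package):

```python
from metaflow import FlowSpec, step, coreweave_s3_proxy  # exposed by these ob-metaflow stubs

class ProxyDemoFlow(FlowSpec):

    @coreweave_s3_proxy  # bare decorator, no arguments: routes S3 traffic through the local proxy
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProxyDemoFlow()
```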
@@ -248,133 +205,136 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
  """
  ...

- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- S3 Proxy decorator for routing S3 requests through a local proxy service.
-
-
- Parameters
- ----------
- integration_name : str, optional
- Name of the S3 proxy integration. If not specified, will use the only
- available S3 proxy integration in the namespace (fails if multiple exist).
- write_mode : str, optional
- The desired behavior during write operations to target (origin) S3 bucket.
- allowed options are:
- "origin-and-cache" -> write to both the target S3 bucket and local object
- storage
- "origin" -> only write to the target S3 bucket
- "cache" -> only write to the object storage service used for caching
- debug : bool, optional
- Enable debug logging for proxy operations.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.

  > Examples

- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
+ **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
  ```python
  @huggingface_hub
  @step
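This hunk relocates the `@retry` and `@card` stubs and documents their parameters. A minimal usage sketch, assuming the flow and step names are invented here and the parameter values simply mirror the stub defaults shown above:

```python
from metaflow import FlowSpec, step, retry, card

class RetryCardFlow(FlowSpec):

    @card(type="default", timeout=45)           # render a Metaflow Card after the step completes
    @retry(times=3, minutes_between_retries=2)  # re-run the task on transient failures
    @step
    def start(self):
        self.result = 42
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryCardFlow()
```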
@@ -393,7 +353,23 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
  self.next(self.train)
  ```

- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
+ **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
+ ```python
+ @huggingface_hub
+ @step
+ def run_training(self):
+ # Temporary directory (auto-cleaned on exit)
+ with current.huggingface_hub.load(
+ repo_id="google-bert/bert-base-uncased",
+ allow_patterns=["*.bin"],
+ ) as local_path:
+ # Use files under local_path
+ train_model(local_path)
+ ...
+
+ ```
+
+ **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
  ```python
  @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
  @step
@@ -402,7 +378,7 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
  ```

  ```python
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
  @step
  def finetune_model(self):
  path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
@@ -433,6 +409,37 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
  temp_dir_root : str, optional
  The root directory that will hold the temporary directory where objects will be downloaded.

+ cache_scope : str, optional
+ The scope of the cache. Can be `checkpoint` / `flow` / `global`.
+
+ - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
+ i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
+ Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
+
+ - `flow`: All repos are cached under the flow, regardless of namespace.
+ i.e., the cached path is derived solely from the flow name.
+ When to use this mode:
+ - Multiple users are executing the same flow and want shared access to the repos cached by the decorator.
+ - Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
+
+ - `global`: All repos are cached under a globally static path.
+ i.e., the base path of the cache is static and all repos are stored under it.
+ When to use this mode:
+ - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
+
+ Each caching scope comes with its own trade-offs:
+ - `checkpoint`:
+ - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
+ - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
+ - `flow`:
+ - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
+ - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
+ - It doesn't promote cache reuse across flows.
+ - `global`:
+ - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
+ - It promotes cache reuse across flows.
+ - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
+
  load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
  The list of repos (models/datasets) to load.

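The new `cache_scope` parameter documented above controls where cached repos are keyed. A hypothetical sketch of the flow-scoped variant (flow name and repo choice are illustrative; only the `cache_scope="flow"` value follows the documented options):

```python
from metaflow import FlowSpec, step, huggingface_hub, current

class HFCacheFlow(FlowSpec):

    # "flow" scope: the cached repo is keyed by flow name, so any namespace
    # running this flow reuses the same cached copy from Metaflow's datastore.
    @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"], cache_scope="flow")
    @step
    def start(self):
        path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
        print(path_to_model)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HFCacheFlow()
```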
@@ -448,252 +455,171 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
448
455
  """
449
456
  ...
450
457
 
451
- @typing.overload
452
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
453
- """
454
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
455
- It exists to make it easier for users to know that this decorator should only be used with
456
- a Neo Cloud like CoreWeave.
457
- """
458
- ...
459
-
460
- @typing.overload
461
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
462
- ...
463
-
464
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
465
- """
466
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
467
- It exists to make it easier for users to know that this decorator should only be used with
468
- a Neo Cloud like CoreWeave.
469
- """
470
- ...
471
-
472
- @typing.overload
473
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
458
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
474
459
  """
475
- Creates a human-readable report, a Metaflow Card, after this step completes.
460
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
476
461
 
477
- Note that you may add multiple `@card` decorators in a step with different parameters.
462
+ User code call
463
+ --------------
464
+ @vllm(
465
+ model="...",
466
+ ...
467
+ )
468
+
469
+ Valid backend options
470
+ ---------------------
471
+ - 'local': Run as a separate process on the local task machine.
472
+
473
+ Valid model options
474
+ -------------------
475
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
476
+
477
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
478
+ If you need multiple models, you must create multiple @vllm decorators.
478
479
 
479
480
 
480
481
  Parameters
481
482
  ----------
482
- type : str, default 'default'
483
- Card type.
484
- id : str, optional, default None
485
- If multiple cards are present, use this id to identify this card.
486
- options : Dict[str, Any], default {}
487
- Options passed to the card. The contents depend on the card type.
488
- timeout : int, default 45
489
- Interrupt reporting if it takes more than this many seconds.
483
+ model: str
484
+ HuggingFace model identifier to be served by vLLM.
485
+ backend: str
486
+ Determines where and how to run the vLLM process.
487
+ openai_api_server: bool
488
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
489
+ Default is False (uses native engine).
490
+ Set to True for backward compatibility with existing code.
491
+ debug: bool
492
+ Whether to turn on verbose debugging logs.
493
+ card_refresh_interval: int
494
+ Interval in seconds for refreshing the vLLM status card.
495
+ Only used when openai_api_server=True.
496
+ max_retries: int
497
+ Maximum number of retries checking for vLLM server startup.
498
+ Only used when openai_api_server=True.
499
+ retry_alert_frequency: int
500
+ Frequency of alert logs for vLLM server startup retries.
501
+ Only used when openai_api_server=True.
502
+ engine_args : dict
503
+ Additional keyword arguments to pass to the vLLM engine.
504
+ For example, `tensor_parallel_size=2`.
490
505
  """
491
506
  ...
492
507
 
493
508
  @typing.overload
494
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
495
- ...
496
-
497
- @typing.overload
498
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
499
- ...
500
-
501
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
509
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
502
510
  """
503
- Creates a human-readable report, a Metaflow Card, after this step completes.
504
-
505
- Note that you may add multiple `@card` decorators in a step with different parameters.
511
+ Specifies secrets to be retrieved and injected as environment variables prior to
512
+ the execution of a step.
506
513
 
507
514
 
508
515
  Parameters
509
516
  ----------
510
- type : str, default 'default'
511
- Card type.
512
- id : str, optional, default None
513
- If multiple cards are present, use this id to identify this card.
514
- options : Dict[str, Any], default {}
515
- Options passed to the card. The contents depend on the card type.
516
- timeout : int, default 45
517
- Interrupt reporting if it takes more than this many seconds.
517
+ sources : List[Union[str, Dict[str, Any]]], default: []
518
+ List of secret specs, defining how the secrets are to be retrieved
519
+ role : str, optional, default: None
520
+ Role to use for fetching secrets
518
521
  """
519
522
  ...
520
523
 
521
524
  @typing.overload
522
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
525
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
526
+ ...
527
+
528
+ @typing.overload
529
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
530
+ ...
531
+
532
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
523
533
  """
524
- Enables loading / saving of models within a step.
525
-
526
- > Examples
527
- - Saving Models
528
- ```python
529
- @model
530
- @step
531
- def train(self):
532
- # current.model.save returns a dictionary reference to the model saved
533
- self.my_model = current.model.save(
534
- path_to_my_model,
535
- label="my_model",
536
- metadata={
537
- "epochs": 10,
538
- "batch-size": 32,
539
- "learning-rate": 0.001,
540
- }
541
- )
542
- self.next(self.test)
543
-
544
- @model(load="my_model")
545
- @step
546
- def test(self):
547
- # `current.model.loaded` returns a dictionary of the loaded models
548
- # where the key is the name of the artifact and the value is the path to the model
549
- print(os.listdir(current.model.loaded["my_model"]))
550
- self.next(self.end)
551
- ```
552
-
553
- - Loading models
554
- ```python
555
- @step
556
- def train(self):
557
- # current.model.load returns the path to the model loaded
558
- checkpoint_path = current.model.load(
559
- self.checkpoint_key,
560
- )
561
- model_path = current.model.load(
562
- self.model,
563
- )
564
- self.next(self.test)
565
- ```
534
+ Specifies secrets to be retrieved and injected as environment variables prior to
535
+ the execution of a step.
566
536
 
567
537
 
568
538
  Parameters
569
539
  ----------
570
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
571
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
572
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
573
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
574
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
575
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
576
-
577
- temp_dir_root : str, default: None
578
- The root directory under which `current.model.loaded` will store loaded models
540
+ sources : List[Union[str, Dict[str, Any]]], default: []
541
+ List of secret specs, defining how the secrets are to be retrieved
542
+ role : str, optional, default: None
543
+ Role to use for fetching secrets
579
544
  """
580
545
  ...
581
546
 
582
- @typing.overload
583
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
584
- ...
585
-
586
- @typing.overload
587
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
588
- ...
589
-
590
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
547
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
591
548
  """
592
- Enables loading / saving of models within a step.
593
-
594
- > Examples
595
- - Saving Models
596
- ```python
597
- @model
598
- @step
599
- def train(self):
600
- # current.model.save returns a dictionary reference to the model saved
601
- self.my_model = current.model.save(
602
- path_to_my_model,
603
- label="my_model",
604
- metadata={
605
- "epochs": 10,
606
- "batch-size": 32,
607
- "learning-rate": 0.001,
608
- }
609
- )
610
- self.next(self.test)
611
-
612
- @model(load="my_model")
613
- @step
614
- def test(self):
615
- # `current.model.loaded` returns a dictionary of the loaded models
616
- # where the key is the name of the artifact and the value is the path to the model
617
- print(os.listdir(current.model.loaded["my_model"]))
618
- self.next(self.end)
619
- ```
620
-
621
- - Loading models
622
- ```python
623
- @step
624
- def train(self):
625
- # current.model.load returns the path to the model loaded
626
- checkpoint_path = current.model.load(
627
- self.checkpoint_key,
628
- )
629
- model_path = current.model.load(
630
- self.model,
631
- )
632
- self.next(self.test)
633
- ```
549
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
634
550
 
635
551
 
636
552
  Parameters
637
553
  ----------
638
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
639
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
640
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
641
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
642
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
643
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
644
-
645
- temp_dir_root : str, default: None
646
- The root directory under which `current.model.loaded` will store loaded models
554
+ integration_name : str, optional
555
+ Name of the S3 proxy integration. If not specified, will use the only
556
+ available S3 proxy integration in the namespace (fails if multiple exist).
557
+ write_mode : str, optional
558
+ The desired behavior during write operations to target (origin) S3 bucket.
559
+ Allowed options are:
560
+ "origin-and-cache" -> write to both the target S3 bucket and local object
561
+ storage
562
+ "origin" -> only write to the target S3 bucket
563
+ "cache" -> only write to the object storage service used for caching
564
+ debug : bool, optional
565
+ Enable debug logging for proxy operations.
647
566
  """
648
567
  ...
649
568
 
650
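A minimal sketch of how `@s3_proxy` might be applied (not part of the packaged stubs): it assumes the decorator is importable from the top-level `metaflow` namespace as this stub suggests, uses one of the documented `write_mode` options, and the integration name is a placeholder.

```python
from metaflow import FlowSpec, s3_proxy, step


class S3ProxyExampleFlow(FlowSpec):
    # "my-s3-proxy" is a placeholder integration name.
    @s3_proxy(integration_name="my-s3-proxy", write_mode="origin-and-cache")
    @step
    def start(self):
        # S3 reads and writes issued inside this step are routed through the local proxy.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3ProxyExampleFlow()
```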
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
569
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
651
570
  """
652
- This decorator is used to run vllm APIs as Metaflow task sidecars.
571
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
653
572
 
654
573
  User code call
655
574
  --------------
656
- @vllm(
657
- model="...",
575
+ @ollama(
576
+ models=[...],
658
577
  ...
659
578
  )
660
579
 
661
580
  Valid backend options
662
581
  ---------------------
663
582
  - 'local': Run as a separate process on the local task machine.
583
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
584
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
664
585
 
665
586
  Valid model options
666
587
  -------------------
667
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
668
-
669
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
670
- If you need multiple models, you must create multiple @vllm decorators.
588
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
671
589
 
672
590
 
673
591
  Parameters
674
592
  ----------
675
- model: str
676
- HuggingFace model identifier to be served by vLLM.
593
+ models: list[str]
594
+ List of Ollama models to run in sidecar containers.
677
595
  backend: str
678
- Determines where and how to run the vLLM process.
679
- openai_api_server: bool
680
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
681
- Default is False (uses native engine).
682
- Set to True for backward compatibility with existing code.
596
+ Determines where and how to run the Ollama process.
597
+ force_pull: bool
598
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
599
+ cache_update_policy: str
600
+ Cache update policy: "auto", "force", or "never".
601
+ force_cache_update: bool
602
+ Simple override for "force" cache update policy.
683
603
  debug: bool
684
604
  Whether to turn on verbose debugging logs.
685
- card_refresh_interval: int
686
- Interval in seconds for refreshing the vLLM status card.
687
- Only used when openai_api_server=True.
688
- max_retries: int
689
- Maximum number of retries checking for vLLM server startup.
690
- Only used when openai_api_server=True.
691
- retry_alert_frequency: int
692
- Frequency of alert logs for vLLM server startup retries.
693
- Only used when openai_api_server=True.
694
- engine_args : dict
695
- Additional keyword arguments to pass to the vLLM engine.
696
- For example, `tensor_parallel_size=2`.
605
+ circuit_breaker_config: dict
606
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
607
+ timeout_config: dict
608
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
609
+ """
610
+ ...
611
+
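As a usage illustration (not part of the packaged stubs), a step backed by an Ollama sidecar might be declared as below. The sketch assumes the decorator is importable from the top-level `metaflow` namespace, that `'llama3.2'` is one of the model options named in the docstring, and that parameters not passed fall back to their implementation defaults.

```python
from metaflow import FlowSpec, ollama, step


class OllamaExampleFlow(FlowSpec):
    # 'llama3.2' is taken from the model options listed in the docstring above.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The Ollama sidecar is reachable from the step body for inference calls.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaExampleFlow()
```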
612
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
613
+ """
614
+ Specifies that this step should execute on DGX cloud.
615
+
616
+
617
+ Parameters
618
+ ----------
619
+ gpu : int
620
+ Number of GPUs to use.
621
+ gpu_type : str
622
+ Type of Nvidia GPU to use.
697
623
  """
698
624
  ...
699
625
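A hedged sketch of `@nvct` usage (not part of the packaged stubs), assuming the decorator is importable from the top-level `metaflow` namespace; the GPU type string is a placeholder and should match whatever is available in the target DGX Cloud account.

```python
from metaflow import FlowSpec, nvct, step


class NvctExampleFlow(FlowSpec):
    # gpu_type="H100" is a placeholder value, not a documented default.
    @nvct(gpu=1, gpu_type="H100")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvctExampleFlow()
```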
 
@@ -716,151 +642,24 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
716
642
  """
717
643
  ...
718
644
 
719
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
720
- """
721
- Specifies that this step should execute on Kubernetes.
722
-
723
-
724
- Parameters
725
- ----------
726
- cpu : int, default 1
727
- Number of CPUs required for this step. If `@resources` is
728
- also present, the maximum value from all decorators is used.
729
- memory : int, default 4096
730
- Memory size (in MB) required for this step. If
731
- `@resources` is also present, the maximum value from all decorators is
732
- used.
733
- disk : int, default 10240
734
- Disk size (in MB) required for this step. If
735
- `@resources` is also present, the maximum value from all decorators is
736
- used.
737
- image : str, optional, default None
738
- Docker image to use when launching on Kubernetes. If not specified, and
739
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
740
- not, a default Docker image mapping to the current version of Python is used.
741
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
742
- If given, the imagePullPolicy to be applied to the Docker image of the step.
743
- image_pull_secrets: List[str], default []
744
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
745
- Kubernetes image pull secrets to use when pulling container images
746
- in Kubernetes.
747
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
748
- Kubernetes service account to use when launching pod in Kubernetes.
749
- secrets : List[str], optional, default None
750
- Kubernetes secrets to use when launching pod in Kubernetes. These
751
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
752
- in Metaflow configuration.
753
- node_selector: Union[Dict[str,str], str], optional, default None
754
- Kubernetes node selector(s) to apply to the pod running the task.
755
- Can be passed in as a comma separated string of values e.g.
756
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
757
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
758
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
759
- Kubernetes namespace to use when launching pod in Kubernetes.
760
- gpu : int, optional, default None
761
- Number of GPUs required for this step. A value of zero implies that
762
- the scheduled node should not have GPUs.
763
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
764
- The vendor of the GPUs to be used for this step.
765
- tolerations : List[Dict[str,str]], default []
766
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
767
- Kubernetes tolerations to use when launching pod in Kubernetes.
768
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
769
- Kubernetes labels to use when launching pod in Kubernetes.
770
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
771
- Kubernetes annotations to use when launching pod in Kubernetes.
772
- use_tmpfs : bool, default False
773
- This enables an explicit tmpfs mount for this step.
774
- tmpfs_tempdir : bool, default True
775
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
776
- tmpfs_size : int, optional, default: None
777
- The value for the size (in MiB) of the tmpfs mount for this step.
778
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
779
- memory allocated for this step.
780
- tmpfs_path : str, optional, default /metaflow_temp
781
- Path to tmpfs mount for this step.
782
- persistent_volume_claims : Dict[str, str], optional, default None
783
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
784
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
785
- shared_memory: int, optional
786
- Shared memory size (in MiB) required for this step
787
- port: int, optional
788
- Port number to specify in the Kubernetes job object
789
- compute_pool : str, optional, default None
790
- Compute pool to be used for for this step.
791
- If not specified, any accessible compute pool within the perimeter is used.
792
- hostname_resolution_timeout: int, default 10 * 60
793
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
794
- Only applicable when @parallel is used.
795
- qos: str, default: Burstable
796
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
797
-
798
- security_context: Dict[str, Any], optional, default None
799
- Container security context. Applies to the task container. Allows the following keys:
800
- - privileged: bool, optional, default None
801
- - allow_privilege_escalation: bool, optional, default None
802
- - run_as_user: int, optional, default None
803
- - run_as_group: int, optional, default None
804
- - run_as_non_root: bool, optional, default None
805
- """
806
- ...
807
-
808
645
  @typing.overload
809
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
646
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
810
647
  """
811
- Specifies the Conda environment for the step.
812
-
813
- Information in this decorator will augment any
814
- attributes set in the `@conda_base` flow-level decorator. Hence,
815
- you can use `@conda_base` to set packages required by all
816
- steps and use `@conda` to specify step-specific overrides.
817
-
818
-
819
- Parameters
820
- ----------
821
- packages : Dict[str, str], default {}
822
- Packages to use for this step. The key is the name of the package
823
- and the value is the version to use.
824
- libraries : Dict[str, str], default {}
825
- Supported for backward compatibility. When used with packages, packages will take precedence.
826
- python : str, optional, default None
827
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
828
- that the version used will correspond to the version of the Python interpreter used to start the run.
829
- disabled : bool, default False
830
- If set to True, disables @conda.
648
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
649
+ It exists to make it easier for users to know that this decorator should only be used with
650
+ a Neo Cloud like Nebius.
831
651
  """
832
652
  ...
833
653
 
834
654
  @typing.overload
835
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
836
- ...
837
-
838
- @typing.overload
839
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
655
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
840
656
  ...
841
657
 
842
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
658
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
843
659
  """
844
- Specifies the Conda environment for the step.
845
-
846
- Information in this decorator will augment any
847
- attributes set in the `@conda_base` flow-level decorator. Hence,
848
- you can use `@conda_base` to set packages required by all
849
- steps and use `@conda` to specify step-specific overrides.
850
-
851
-
852
- Parameters
853
- ----------
854
- packages : Dict[str, str], default {}
855
- Packages to use for this step. The key is the name of the package
856
- and the value is the version to use.
857
- libraries : Dict[str, str], default {}
858
- Supported for backward compatibility. When used with packages, packages will take precedence.
859
- python : str, optional, default None
860
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
861
- that the version used will correspond to the version of the Python interpreter used to start the run.
862
- disabled : bool, default False
863
- If set to True, disables @conda.
660
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
661
+ It exists to make it easier for users to know that this decorator should only be used with
662
+ a Neo Cloud like Nebius.
864
663
  """
865
664
  ...
866
665
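Since the overloads above take no keyword arguments, `@nebius_s3_proxy` is applied bare. A minimal sketch (not part of the packaged stubs), assuming the decorator is importable from the top-level `metaflow` namespace:

```python
from metaflow import FlowSpec, nebius_s3_proxy, step


class NebiusProxyExampleFlow(FlowSpec):
    # Applied without arguments; intended only for Neo Cloud deployments such as Nebius.
    @nebius_s3_proxy
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NebiusProxyExampleFlow()
```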
 
@@ -915,25 +714,6 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
915
714
  """
916
715
  ...
917
716
 
918
- @typing.overload
919
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
920
- """
921
- Decorator prototype for all step decorators. This function gets specialized
922
- and imported for all decorators types by _import_plugin_decorators().
923
- """
924
- ...
925
-
926
- @typing.overload
927
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
928
- ...
929
-
930
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
931
- """
932
- Decorator prototype for all step decorators. This function gets specialized
933
- and imported for all decorators types by _import_plugin_decorators().
934
- """
935
- ...
936
-
937
717
  @typing.overload
938
718
  def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
939
719
  """
@@ -1076,51 +856,67 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
1076
856
  With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1077
857
  created within the task will be loaded when the task retries execution on failure.
1078
858
 
1079
- temp_dir_root : str, default: None
1080
- The root directory under which `current.checkpoint.directory` will be created.
859
+ temp_dir_root : str, default: None
860
+ The root directory under which `current.checkpoint.directory` will be created.
861
+ """
862
+ ...
863
+
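A hedged sketch of `@checkpoint` usage (not part of the packaged stubs): the docstring above references `current.checkpoint.directory`, and the sketch assumes the extension also exposes a `current.checkpoint.save()` call that persists that directory; treat both the call and the file name as illustrative.

```python
import os

from metaflow import FlowSpec, checkpoint, current, step


class CheckpointExampleFlow(FlowSpec):
    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        # Write state into the checkpoint directory, then persist it.
        # `current.checkpoint.save()` is assumed from the checkpoint extension's API.
        with open(os.path.join(current.checkpoint.directory, "state.txt"), "w") as f:
            f.write("step progress")
        current.checkpoint.save()
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CheckpointExampleFlow()
```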
864
+ @typing.overload
865
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
866
+ """
867
+ Specifies a timeout for your step.
868
+
869
+ This decorator is useful if this step may hang indefinitely.
870
+
871
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
872
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
873
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
874
+
875
+ Note that all the values specified in parameters are added together so if you specify
876
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
877
+
878
+
879
+ Parameters
880
+ ----------
881
+ seconds : int, default 0
882
+ Number of seconds to wait prior to timing out.
883
+ minutes : int, default 0
884
+ Number of minutes to wait prior to timing out.
885
+ hours : int, default 0
886
+ Number of hours to wait prior to timing out.
1081
887
  """
1082
888
  ...
1083
889
 
1084
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
890
+ @typing.overload
891
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
892
+ ...
893
+
894
+ @typing.overload
895
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
896
+ ...
897
+
898
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1085
899
  """
1086
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
900
+ Specifies a timeout for your step.
1087
901
 
1088
- User code call
1089
- --------------
1090
- @ollama(
1091
- models=[...],
1092
- ...
1093
- )
902
+ This decorator is useful if this step may hang indefinitely.
1094
903
 
1095
- Valid backend options
1096
- ---------------------
1097
- - 'local': Run as a separate process on the local task machine.
1098
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1099
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
904
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
905
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
906
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1100
907
 
1101
- Valid model options
1102
- -------------------
1103
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
908
+ Note that all the values specified in parameters are added together so if you specify
909
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1104
910
 
1105
911
 
1106
912
  Parameters
1107
913
  ----------
1108
- models: list[str]
1109
- List of Ollama containers running models in sidecars.
1110
- backend: str
1111
- Determines where and how to run the Ollama process.
1112
- force_pull: bool
1113
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1114
- cache_update_policy: str
1115
- Cache update policy: "auto", "force", or "never".
1116
- force_cache_update: bool
1117
- Simple override for "force" cache update policy.
1118
- debug: bool
1119
- Whether to turn on verbose debugging logs.
1120
- circuit_breaker_config: dict
1121
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1122
- timeout_config: dict
1123
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
914
+ seconds : int, default 0
915
+ Number of seconds to wait prior to timing out.
916
+ minutes : int, default 0
917
+ Number of minutes to wait prior to timing out.
918
+ hours : int, default 0
919
+ Number of hours to wait prior to timing out.
1124
920
  """
1125
921
  ...
1126
922
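A minimal usage sketch (not part of the packaged stubs) combining `@timeout` with `@retry`, as the docstring suggests; per the docstring, the time units are added together, so the effective limit below is 1 hour 30 minutes.

```python
from metaflow import FlowSpec, retry, step, timeout


class TimeoutExampleFlow(FlowSpec):
    # A timeout is raised as an exception, so the step is retried up to 2 times.
    @retry(times=2)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutExampleFlow()
```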
 
@@ -1143,27 +939,6 @@ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag]
1143
939
  """
1144
940
  ...
1145
941
 
1146
- @typing.overload
1147
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1148
- """
1149
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1150
- It exists to make it easier for users to know that this decorator should only be used with
1151
- a Neo Cloud like Nebius.
1152
- """
1153
- ...
1154
-
1155
- @typing.overload
1156
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1157
- ...
1158
-
1159
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1160
- """
1161
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1162
- It exists to make it easier for users to know that this decorator should only be used with
1163
- a Neo Cloud like Nebius.
1164
- """
1165
- ...
1166
-
1167
942
  @typing.overload
1168
943
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1169
944
  """
@@ -1294,99 +1069,329 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1294
1069
  """
1295
1070
  ...
1296
1071
 
1072
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1073
+ """
1074
+ Specifies that this step should execute on DGX cloud.
1075
+
1076
+
1077
+ Parameters
1078
+ ----------
1079
+ gpu : int
1080
+ Number of GPUs to use.
1081
+ gpu_type : str
1082
+ Type of Nvidia GPU to use.
1083
+ queue_timeout : int
1084
+ Time to keep the job in NVCF's queue.
1085
+ """
1086
+ ...
1087
+
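A hedged sketch of `@nvidia` usage (not part of the packaged stubs), assuming the decorator is importable from the top-level `metaflow` namespace; the GPU type is a placeholder and `queue_timeout` is assumed to be expressed in seconds.

```python
from metaflow import FlowSpec, nvidia, step


class NvidiaExampleFlow(FlowSpec):
    # gpu_type and queue_timeout values are placeholders, not documented defaults.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvidiaExampleFlow()
```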
1297
1088
  @typing.overload
1298
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1089
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1299
1090
  """
1300
- Specifies the number of times the task corresponding
1301
- to a step needs to be retried.
1091
+ Specifies environment variables to be set prior to the execution of a step.
1302
1092
 
1303
- This decorator is useful for handling transient errors, such as networking issues.
1304
- If your task contains operations that can't be retried safely, e.g. database updates,
1305
- it is advisable to annotate it with `@retry(times=0)`.
1306
1093
 
1307
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1308
- decorator will execute a no-op task after all retries have been exhausted,
1309
- ensuring that the flow execution can continue.
1094
+ Parameters
1095
+ ----------
1096
+ vars : Dict[str, str], default {}
1097
+ Dictionary of environment variables to set.
1098
+ """
1099
+ ...
1100
+
1101
+ @typing.overload
1102
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1103
+ ...
1104
+
1105
+ @typing.overload
1106
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1107
+ ...
1108
+
1109
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1110
+ """
1111
+ Specifies environment variables to be set prior to the execution of a step.
1112
+
1113
+
1114
+ Parameters
1115
+ ----------
1116
+ vars : Dict[str, str], default {}
1117
+ Dictionary of environment variables to set.
1118
+ """
1119
+ ...
1120
+
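A minimal usage sketch of `@environment` (not part of the packaged stubs); the variable name and value are arbitrary examples.

```python
from metaflow import FlowSpec, environment, step


class EnvironmentExampleFlow(FlowSpec):
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        import os
        # The variable is set before the step body runs.
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvironmentExampleFlow()
```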
1121
+ @typing.overload
1122
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1123
+ """
1124
+ Enables loading / saving of models within a step.
1125
+
1126
+ > Examples
1127
+ - Saving Models
1128
+ ```python
1129
+ @model
1130
+ @step
1131
+ def train(self):
1132
+ # current.model.save returns a dictionary reference to the model saved
1133
+ self.my_model = current.model.save(
1134
+ path_to_my_model,
1135
+ label="my_model",
1136
+ metadata={
1137
+ "epochs": 10,
1138
+ "batch-size": 32,
1139
+ "learning-rate": 0.001,
1140
+ }
1141
+ )
1142
+ self.next(self.test)
1143
+
1144
+ @model(load="my_model")
1145
+ @step
1146
+ def test(self):
1147
+ # `current.model.loaded` returns a dictionary of the loaded models
1148
+ # where the key is the name of the artifact and the value is the path to the model
1149
+ print(os.listdir(current.model.loaded["my_model"]))
1150
+ self.next(self.end)
1151
+ ```
1152
+
1153
+ - Loading models
1154
+ ```python
1155
+ @step
1156
+ def train(self):
1157
+ # current.model.load returns the path to the model loaded
1158
+ checkpoint_path = current.model.load(
1159
+ self.checkpoint_key,
1160
+ )
1161
+ model_path = current.model.load(
1162
+ self.model,
1163
+ )
1164
+ self.next(self.test)
1165
+ ```
1166
+
1167
+
1168
+ Parameters
1169
+ ----------
1170
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1171
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1172
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1173
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1174
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1175
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1176
+
1177
+ temp_dir_root : str, default: None
1178
+ The root directory under which `current.model.loaded` will store loaded models
1179
+ """
1180
+ ...
1181
+
1182
+ @typing.overload
1183
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1184
+ ...
1185
+
1186
+ @typing.overload
1187
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1188
+ ...
1189
+
1190
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1191
+ """
1192
+ Enables loading / saving of models within a step.
1193
+
1194
+ > Examples
1195
+ - Saving Models
1196
+ ```python
1197
+ @model
1198
+ @step
1199
+ def train(self):
1200
+ # current.model.save returns a dictionary reference to the model saved
1201
+ self.my_model = current.model.save(
1202
+ path_to_my_model,
1203
+ label="my_model",
1204
+ metadata={
1205
+ "epochs": 10,
1206
+ "batch-size": 32,
1207
+ "learning-rate": 0.001,
1208
+ }
1209
+ )
1210
+ self.next(self.test)
1211
+
1212
+ @model(load="my_model")
1213
+ @step
1214
+ def test(self):
1215
+ # `current.model.loaded` returns a dictionary of the loaded models
1216
+ # where the key is the name of the artifact and the value is the path to the model
1217
+ print(os.listdir(current.model.loaded["my_model"]))
1218
+ self.next(self.end)
1219
+ ```
1220
+
1221
+ - Loading models
1222
+ ```python
1223
+ @step
1224
+ def train(self):
1225
+ # current.model.load returns the path to the model loaded
1226
+ checkpoint_path = current.model.load(
1227
+ self.checkpoint_key,
1228
+ )
1229
+ model_path = current.model.load(
1230
+ self.model,
1231
+ )
1232
+ self.next(self.test)
1233
+ ```
1310
1234
 
1311
1235
 
1312
1236
  Parameters
1313
1237
  ----------
1314
- times : int, default 3
1315
- Number of times to retry this task.
1316
- minutes_between_retries : int, default 2
1317
- Number of minutes between retries.
1238
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1239
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1240
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1241
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1242
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1243
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1244
+
1245
+ temp_dir_root : str, default: None
1246
+ The root directory under which `current.model.loaded` will store loaded models
1318
1247
  """
1319
1248
  ...
1320
1249
 
1321
- @typing.overload
1322
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1323
- ...
1324
-
1325
- @typing.overload
1326
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1327
- ...
1328
-
1329
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1250
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1330
1251
  """
1331
- Specifies the number of times the task corresponding
1332
- to a step needs to be retried.
1333
-
1334
- This decorator is useful for handling transient errors, such as networking issues.
1335
- If your task contains operations that can't be retried safely, e.g. database updates,
1336
- it is advisable to annotate it with `@retry(times=0)`.
1337
-
1338
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1339
- decorator will execute a no-op task after all retries have been exhausted,
1340
- ensuring that the flow execution can continue.
1252
+ Specifies that this step should execute on Kubernetes.
1341
1253
 
1342
1254
 
1343
1255
  Parameters
1344
1256
  ----------
1345
- times : int, default 3
1346
- Number of times to retry this task.
1347
- minutes_between_retries : int, default 2
1348
- Number of minutes between retries.
1257
+ cpu : int, default 1
1258
+ Number of CPUs required for this step. If `@resources` is
1259
+ also present, the maximum value from all decorators is used.
1260
+ memory : int, default 4096
1261
+ Memory size (in MB) required for this step. If
1262
+ `@resources` is also present, the maximum value from all decorators is
1263
+ used.
1264
+ disk : int, default 10240
1265
+ Disk size (in MB) required for this step. If
1266
+ `@resources` is also present, the maximum value from all decorators is
1267
+ used.
1268
+ image : str, optional, default None
1269
+ Docker image to use when launching on Kubernetes. If not specified, and
1270
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1271
+ not, a default Docker image mapping to the current version of Python is used.
1272
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1273
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1274
+ image_pull_secrets: List[str], default []
1275
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1276
+ Kubernetes image pull secrets to use when pulling container images
1277
+ in Kubernetes.
1278
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1279
+ Kubernetes service account to use when launching pod in Kubernetes.
1280
+ secrets : List[str], optional, default None
1281
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1282
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1283
+ in Metaflow configuration.
1284
+ node_selector: Union[Dict[str,str], str], optional, default None
1285
+ Kubernetes node selector(s) to apply to the pod running the task.
1286
+ Can be passed in as a comma separated string of values e.g.
1287
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1288
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1289
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1290
+ Kubernetes namespace to use when launching pod in Kubernetes.
1291
+ gpu : int, optional, default None
1292
+ Number of GPUs required for this step. A value of zero implies that
1293
+ the scheduled node should not have GPUs.
1294
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1295
+ The vendor of the GPUs to be used for this step.
1296
+ tolerations : List[Dict[str,str]], default []
1297
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1298
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1299
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1300
+ Kubernetes labels to use when launching pod in Kubernetes.
1301
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1302
+ Kubernetes annotations to use when launching pod in Kubernetes.
1303
+ use_tmpfs : bool, default False
1304
+ This enables an explicit tmpfs mount for this step.
1305
+ tmpfs_tempdir : bool, default True
1306
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1307
+ tmpfs_size : int, optional, default: None
1308
+ The value for the size (in MiB) of the tmpfs mount for this step.
1309
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1310
+ memory allocated for this step.
1311
+ tmpfs_path : str, optional, default /metaflow_temp
1312
+ Path to tmpfs mount for this step.
1313
+ persistent_volume_claims : Dict[str, str], optional, default None
1314
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1315
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1316
+ shared_memory: int, optional
1317
+ Shared memory size (in MiB) required for this step
1318
+ port: int, optional
1319
+ Port number to specify in the Kubernetes job object
1320
+ compute_pool : str, optional, default None
1321
+ Compute pool to be used for this step.
1322
+ If not specified, any accessible compute pool within the perimeter is used.
1323
+ hostname_resolution_timeout: int, default 10 * 60
1324
+ Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
1325
+ Only applicable when @parallel is used.
1326
+ qos: str, default: Burstable
1327
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1328
+
1329
+ security_context: Dict[str, Any], optional, default None
1330
+ Container security context. Applies to the task container. Allows the following keys:
1331
+ - privileged: bool, optional, default None
1332
+ - allow_privilege_escalation: bool, optional, default None
1333
+ - run_as_user: int, optional, default None
1334
+ - run_as_group: int, optional, default None
1335
+ - run_as_non_root: bool, optional, default None
1349
1336
  """
1350
1337
  ...
1351
1338
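A minimal usage sketch of `@kubernetes` (not part of the packaged stubs); the resource numbers and image name are illustrative only, and any value left out falls back to the defaults shown in the signature above.

```python
from metaflow import FlowSpec, kubernetes, step


class KubernetesExampleFlow(FlowSpec):
    # 2 CPUs, 8 GiB memory, 20 GiB disk; the image is a placeholder.
    @kubernetes(cpu=2, memory=8192, disk=20480, image="python:3.11-slim")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    KubernetesExampleFlow()
```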
 
1352
1339
  @typing.overload
1353
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1340
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1354
1341
  """
1355
- Specifies the PyPI packages for all steps of the flow.
1342
+ Specifies the Conda environment for the step.
1343
+
1344
+ Information in this decorator will augment any
1345
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1346
+ you can use `@conda_base` to set packages required by all
1347
+ steps and use `@conda` to specify step-specific overrides.
1356
1348
 
1357
- Use `@pypi_base` to set common packages required by all
1358
- steps and use `@pypi` to specify step-specific overrides.
1359
1349
 
1360
1350
  Parameters
1361
1351
  ----------
1362
- packages : Dict[str, str], default: {}
1363
- Packages to use for this flow. The key is the name of the package
1352
+ packages : Dict[str, str], default {}
1353
+ Packages to use for this step. The key is the name of the package
1364
1354
  and the value is the version to use.
1365
- python : str, optional, default: None
1355
+ libraries : Dict[str, str], default {}
1356
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1357
+ python : str, optional, default None
1366
1358
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1367
1359
  that the version used will correspond to the version of the Python interpreter used to start the run.
1360
+ disabled : bool, default False
1361
+ If set to True, disables @conda.
1368
1362
  """
1369
1363
  ...
1370
1364
 
1371
1365
  @typing.overload
1372
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1366
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1373
1367
  ...
1374
1368
 
1375
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1369
+ @typing.overload
1370
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1371
+ ...
1372
+
1373
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1376
1374
  """
1377
- Specifies the PyPI packages for all steps of the flow.
1375
+ Specifies the Conda environment for the step.
1376
+
1377
+ Information in this decorator will augment any
1378
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1379
+ you can use `@conda_base` to set packages required by all
1380
+ steps and use `@conda` to specify step-specific overrides.
1378
1381
 
1379
- Use `@pypi_base` to set common packages required by all
1380
- steps and use `@pypi` to specify step-specific overrides.
1381
1382
 
1382
1383
  Parameters
1383
1384
  ----------
1384
- packages : Dict[str, str], default: {}
1385
- Packages to use for this flow. The key is the name of the package
1385
+ packages : Dict[str, str], default {}
1386
+ Packages to use for this step. The key is the name of the package
1386
1387
  and the value is the version to use.
1387
- python : str, optional, default: None
1388
+ libraries : Dict[str, str], default {}
1389
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1390
+ python : str, optional, default None
1388
1391
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1389
1392
  that the version used will correspond to the version of the Python interpreter used to start the run.
1393
+ disabled : bool, default False
1394
+ If set to True, disables @conda.
1390
1395
  """
1391
1396
  ...
1392
1397
 
@@ -1469,167 +1474,66 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
1469
1474
  ```
1470
1475
  @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1471
1476
  ```
1472
- This is equivalent to:
1473
- ```
1474
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1475
- ```
1476
-
1477
-
1478
- Parameters
1479
- ----------
1480
- event : Union[str, Dict[str, Any]], optional, default None
1481
- Event dependency for this flow.
1482
- events : List[Union[str, Dict[str, Any]]], default []
1483
- Events dependency for this flow.
1484
- options : Dict[str, Any], default {}
1485
- Backend-specific configuration for tuning eventing behavior.
1486
- """
1487
- ...
1488
-
1489
- @typing.overload
1490
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1491
- ...
1492
-
1493
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1494
- """
1495
- Specifies the event(s) that this flow depends on.
1496
-
1497
- ```
1498
- @trigger(event='foo')
1499
- ```
1500
- or
1501
- ```
1502
- @trigger(events=['foo', 'bar'])
1503
- ```
1504
-
1505
- Additionally, you can specify the parameter mappings
1506
- to map event payload to Metaflow parameters for the flow.
1507
- ```
1508
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1509
- ```
1510
- or
1511
- ```
1512
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1513
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1514
- ```
1515
-
1516
- 'parameters' can also be a list of strings and tuples like so:
1517
- ```
1518
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1519
- ```
1520
- This is equivalent to:
1521
- ```
1522
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1523
- ```
1524
-
1525
-
1526
- Parameters
1527
- ----------
1528
- event : Union[str, Dict[str, Any]], optional, default None
1529
- Event dependency for this flow.
1530
- events : List[Union[str, Dict[str, Any]]], default []
1531
- Events dependency for this flow.
1532
- options : Dict[str, Any], default {}
1533
- Backend-specific configuration for tuning eventing behavior.
1534
- """
1535
- ...
1536
-
1537
- @typing.overload
1538
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1539
- """
1540
- Specifies the flow(s) that this flow depends on.
1541
-
1542
- ```
1543
- @trigger_on_finish(flow='FooFlow')
1544
- ```
1545
- or
1546
- ```
1547
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1548
- ```
1549
- This decorator respects the @project decorator and triggers the flow
1550
- when upstream runs within the same namespace complete successfully
1551
-
1552
- Additionally, you can specify project aware upstream flow dependencies
1553
- by specifying the fully qualified project_flow_name.
1554
- ```
1555
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1556
- ```
1557
- or
1558
- ```
1559
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1560
- ```
1561
-
1562
- You can also specify just the project or project branch (other values will be
1563
- inferred from the current project or project branch):
1477
+ This is equivalent to:
1564
1478
  ```
1565
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1479
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1566
1480
  ```
1567
1481
 
1568
- Note that `branch` is typically one of:
1569
- - `prod`
1570
- - `user.bob`
1571
- - `test.my_experiment`
1572
- - `prod.staging`
1573
-
1574
1482
 
1575
1483
  Parameters
1576
1484
  ----------
1577
- flow : Union[str, Dict[str, str]], optional, default None
1578
- Upstream flow dependency for this flow.
1579
- flows : List[Union[str, Dict[str, str]]], default []
1580
- Upstream flow dependencies for this flow.
1485
+ event : Union[str, Dict[str, Any]], optional, default None
1486
+ Event dependency for this flow.
1487
+ events : List[Union[str, Dict[str, Any]]], default []
1488
+ Events dependency for this flow.
1581
1489
  options : Dict[str, Any], default {}
1582
1490
  Backend-specific configuration for tuning eventing behavior.
1583
1491
  """
1584
1492
  ...
1585
1493
 
1586
1494
  @typing.overload
1587
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1495
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1588
1496
  ...
1589
1497
 
1590
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1498
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1591
1499
  """
1592
- Specifies the flow(s) that this flow depends on.
1500
+ Specifies the event(s) that this flow depends on.
1593
1501
 
1594
1502
  ```
1595
- @trigger_on_finish(flow='FooFlow')
1503
+ @trigger(event='foo')
1596
1504
  ```
1597
1505
  or
1598
1506
  ```
1599
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1507
+ @trigger(events=['foo', 'bar'])
1600
1508
  ```
1601
- This decorator respects the @project decorator and triggers the flow
1602
- when upstream runs within the same namespace complete successfully
1603
1509
 
1604
- Additionally, you can specify project aware upstream flow dependencies
1605
- by specifying the fully qualified project_flow_name.
1510
+ Additionally, you can specify the parameter mappings
1511
+ to map event payload to Metaflow parameters for the flow.
1606
1512
  ```
1607
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1513
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1608
1514
  ```
1609
1515
  or
1610
1516
  ```
1611
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1517
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1518
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1612
1519
  ```
1613
1520
 
1614
- You can also specify just the project or project branch (other values will be
1615
- inferred from the current project or project branch):
1521
+ 'parameters' can also be a list of strings and tuples like so:
1616
1522
  ```
1617
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1523
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1524
+ ```
1525
+ This is equivalent to:
1526
+ ```
1527
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1618
1528
  ```
1619
-
1620
- Note that `branch` is typically one of:
1621
- - `prod`
1622
- - `user.bob`
1623
- - `test.my_experiment`
1624
- - `prod.staging`
1625
1529
 
1626
1530
 
1627
1531
  Parameters
1628
1532
  ----------
1629
- flow : Union[str, Dict[str, str]], optional, default None
1630
- Upstream flow dependency for this flow.
1631
- flows : List[Union[str, Dict[str, str]]], default []
1632
- Upstream flow dependencies for this flow.
1533
+ event : Union[str, Dict[str, Any]], optional, default None
1534
+ Event dependency for this flow.
1535
+ events : List[Union[str, Dict[str, Any]]], default []
1536
+ Events dependency for this flow.
1633
1537
  options : Dict[str, Any], default {}
1634
1538
  Backend-specific configuration for tuning eventing behavior.
1635
1539
  """
@@ -1678,6 +1582,41 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1678
1582
  """
1679
1583
  ...
1680
1584
 
1585
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1586
+ """
1587
+ Specifies what flows belong to the same project.
1588
+
1589
+ A project-specific namespace is created for all flows that
1590
+ use the same `@project(name)`.
1591
+
1592
+
1593
+ Parameters
1594
+ ----------
1595
+ name : str
1596
+ Project name. Make sure that the name is unique amongst all
1597
+ projects that use the same production scheduler. The name may
1598
+ contain only lowercase alphanumeric characters and underscores.
1599
+
1600
+ branch : Optional[str], default None
1601
+ The branch to use. If not specified, the branch is set to
1602
+ `user.<username>` unless `production` is set to `True`. This can
1603
+ also be set on the command line using `--branch` as a top-level option.
1604
+ It is an error to specify `branch` in the decorator and on the command line.
1605
+
1606
+ production : bool, default False
1607
+ Whether or not the branch is the production branch. This can also be set on the
1608
+ command line using `--production` as a top-level option. It is an error to specify
1609
+ `production` in the decorator and on the command line.
1610
+ The project branch name will be:
1611
+ - if `branch` is specified:
1612
+ - if `production` is True: `prod.<branch>`
1613
+ - if `production` is False: `test.<branch>`
1614
+ - if `branch` is not specified:
1615
+ - if `production` is True: `prod`
1616
+ - if `production` is False: `user.<username>`
1617
+ """
1618
+ ...
1619
+
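A minimal usage sketch of `@project` (not part of the packaged stubs); with neither `branch` nor `production` set, the branch resolves to `user.<username>` per the docstring. The project name is a placeholder.

```python
from metaflow import FlowSpec, project, step


@project(name="demo_project")
class ProjectExampleFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectExampleFlow()
```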
1681
1620
  @typing.overload
1682
1621
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1683
1622
  """
@@ -1843,6 +1782,107 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1843
1782
  """
1844
1783
  ...
1845
1784
 
1785
+ @typing.overload
1786
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1787
+ """
1788
+ Specifies the flow(s) that this flow depends on.
1789
+
1790
+ ```
1791
+ @trigger_on_finish(flow='FooFlow')
1792
+ ```
1793
+ or
1794
+ ```
1795
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1796
+ ```
1797
+ This decorator respects the @project decorator and triggers the flow
1798
+ when upstream runs within the same namespace complete successfully.
1799
+
1800
+ Additionally, you can specify project aware upstream flow dependencies
1801
+ by specifying the fully qualified project_flow_name.
1802
+ ```
1803
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1804
+ ```
1805
+ or
1806
+ ```
1807
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1808
+ ```
1809
+
1810
+ You can also specify just the project or project branch (other values will be
1811
+ inferred from the current project or project branch):
1812
+ ```
1813
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1814
+ ```
1815
+
1816
+ Note that `branch` is typically one of:
1817
+ - `prod`
1818
+ - `user.bob`
1819
+ - `test.my_experiment`
1820
+ - `prod.staging`
1821
+
1822
+
1823
+ Parameters
1824
+ ----------
1825
+ flow : Union[str, Dict[str, str]], optional, default None
1826
+ Upstream flow dependency for this flow.
1827
+ flows : List[Union[str, Dict[str, str]]], default []
1828
+ Upstream flow dependencies for this flow.
1829
+ options : Dict[str, Any], default {}
1830
+ Backend-specific configuration for tuning eventing behavior.
1831
+ """
1832
+ ...
1833
+
1834
+ @typing.overload
1835
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1836
+ ...
1837
+
1838
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1839
+ """
1840
+ Specifies the flow(s) that this flow depends on.
1841
+
1842
+ ```
1843
+ @trigger_on_finish(flow='FooFlow')
1844
+ ```
1845
+ or
1846
+ ```
1847
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1848
+ ```
1849
+ This decorator respects the @project decorator and triggers the flow
1850
+ when upstream runs within the same namespace complete successfully
1851
+
1852
+ Additionally, you can specify project aware upstream flow dependencies
1853
+ by specifying the fully qualified project_flow_name.
1854
+ ```
1855
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1856
+ ```
1857
+ or
1858
+ ```
1859
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1860
+ ```
1861
+
1862
+ You can also specify just the project or project branch (other values will be
1863
+ inferred from the current project or project branch):
1864
+ ```
1865
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1866
+ ```
1867
+
1868
+ Note that `branch` is typically one of:
1869
+ - `prod`
1870
+ - `user.bob`
1871
+ - `test.my_experiment`
1872
+ - `prod.staging`
1873
+
1874
+
1875
+ Parameters
1876
+ ----------
1877
+ flow : Union[str, Dict[str, str]], optional, default None
1878
+ Upstream flow dependency for this flow.
1879
+ flows : List[Union[str, Dict[str, str]]], default []
1880
+ Upstream flow dependencies for this flow.
1881
+ options : Dict[str, Any], default {}
1882
+ Backend-specific configuration for tuning eventing behavior.
1883
+ """
1884
+ ...
1885
+
1846
1886
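As a sketch (not part of the diff) of the `@trigger_on_finish` usage documented above, combined with `@project` for namespace-aware triggering; the flow and project names are illustrative:

```python
from metaflow import FlowSpec, project, step, trigger_on_finish

@project(name="fraud_detection")          # illustrative project name
@trigger_on_finish(flow="TrainingFlow")   # waits for TrainingFlow in the same namespace
# @trigger_on_finish(flows=["TrainingFlow", "FeatureFlow"])
# @trigger_on_finish(flow={"name": "TrainingFlow", "project": "other_project",
#                          "project_branch": "prod"})
class PublishFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```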
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
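A rough usage sketch for the sensor decorator above; the bucket and key are hypothetical, and the remaining sensor arguments (timeout, poke_interval, and so on) are assumed to fall back to the decorator's defaults when the flow is deployed to Airflow:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(bucket_name="my-data-bucket",
                       bucket_key="landing/2024-01-01/part-0000.parquet")
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```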
@@ -1886,38 +1926,44 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
 
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
 
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
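Finally, a minimal sketch of the `@pypi_base` behaviour documented in the hunk above, with a step-level `@pypi` override; package names and versions are illustrative, not pinned by this package:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class DependencyFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def end(self):
        import sklearn  # available only in this step's environment
        print(self.rows, sklearn.__version__)
```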