ob-metaflow-stubs 6.0.10.0__py2.py3-none-any.whl → 6.0.10.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic. Click here for more details.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +854 -854
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +6 -2
  20. metaflow-stubs/metaflow_current.pyi +26 -26
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +4 -4
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +4 -4
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +13 -13
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +6 -4
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +10 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +7 -4
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +6 -6
  238. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  239. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +3 -3
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  258. {ob_metaflow_stubs-6.0.10.0.dist-info → ob_metaflow_stubs-6.0.10.2.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.2.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.10.0.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.10.0.dist-info → ob_metaflow_stubs-6.0.10.2.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.10.0.dist-info → ob_metaflow_stubs-6.0.10.2.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.2.1+obcheckpoint(0.2.4);ob(v1) #
4
- # Generated on 2025-09-08T21:00:14.553698 #
3
+ # MF version: 2.18.5.1+obcheckpoint(0.2.4);ob(v1) #
4
+ # Generated on 2025-09-16T01:38:51.425889 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -40,17 +40,17 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
42
  from . import cards as cards
43
+ from . import tuple_util as tuple_util
43
44
  from . import metaflow_git as metaflow_git
44
45
  from . import events as events
45
- from . import tuple_util as tuple_util
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
51
52
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
53
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
53
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
56
56
  from .client.core import get_namespace as get_namespace
@@ -167,82 +167,146 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
167
167
  """
168
168
  ...
169
169
 
170
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
171
+ """
172
+ Specifies that this step should execute on DGX cloud.
173
+
174
+
175
+ Parameters
176
+ ----------
177
+ gpu : int
178
+ Number of GPUs to use.
179
+ gpu_type : str
180
+ Type of Nvidia GPU to use.
181
+ """
182
+ ...
183
+
170
184
  @typing.overload
171
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
185
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
172
186
  """
173
- Specifies the resources needed when executing this step.
187
+ Enables loading / saving of models within a step.
174
188
 
175
- Use `@resources` to specify the resource requirements
176
- independently of the specific compute layer (`@batch`, `@kubernetes`).
189
+ > Examples
190
+ - Saving Models
191
+ ```python
192
+ @model
193
+ @step
194
+ def train(self):
195
+ # current.model.save returns a dictionary reference to the model saved
196
+ self.my_model = current.model.save(
197
+ path_to_my_model,
198
+ label="my_model",
199
+ metadata={
200
+ "epochs": 10,
201
+ "batch-size": 32,
202
+ "learning-rate": 0.001,
203
+ }
204
+ )
205
+ self.next(self.test)
177
206
 
178
- You can choose the compute layer on the command line by executing e.g.
179
- ```
180
- python myflow.py run --with batch
181
- ```
182
- or
207
+ @model(load="my_model")
208
+ @step
209
+ def test(self):
210
+ # `current.model.loaded` returns a dictionary of the loaded models
211
+ # where the key is the name of the artifact and the value is the path to the model
212
+ print(os.listdir(current.model.loaded["my_model"]))
213
+ self.next(self.end)
183
214
  ```
184
- python myflow.py run --with kubernetes
215
+
216
+ - Loading models
217
+ ```python
218
+ @step
219
+ def train(self):
220
+ # current.model.load returns the path to the model loaded
221
+ checkpoint_path = current.model.load(
222
+ self.checkpoint_key,
223
+ )
224
+ model_path = current.model.load(
225
+ self.model,
226
+ )
227
+ self.next(self.test)
185
228
  ```
186
- which executes the flow on the desired system using the
187
- requirements specified in `@resources`.
188
229
 
189
230
 
190
231
  Parameters
191
232
  ----------
192
- cpu : int, default 1
193
- Number of CPUs required for this step.
194
- gpu : int, optional, default None
195
- Number of GPUs required for this step.
196
- disk : int, optional, default None
197
- Disk size (in MB) required for this step. Only applies on Kubernetes.
198
- memory : int, default 4096
199
- Memory size (in MB) required for this step.
200
- shared_memory : int, optional, default None
201
- The value for the size (in MiB) of the /dev/shm volume for this step.
202
- This parameter maps to the `--shm-size` option in Docker.
233
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
234
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
235
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
236
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
237
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
238
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
239
+
240
+ temp_dir_root : str, default: None
241
+ The root directory under which `current.model.loaded` will store loaded models
203
242
  """
204
243
  ...
205
244
 
206
245
  @typing.overload
207
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
246
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
208
247
  ...
209
248
 
210
249
  @typing.overload
211
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
250
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
212
251
  ...
213
252
 
214
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
253
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
215
254
  """
216
- Specifies the resources needed when executing this step.
255
+ Enables loading / saving of models within a step.
217
256
 
218
- Use `@resources` to specify the resource requirements
219
- independently of the specific compute layer (`@batch`, `@kubernetes`).
257
+ > Examples
258
+ - Saving Models
259
+ ```python
260
+ @model
261
+ @step
262
+ def train(self):
263
+ # current.model.save returns a dictionary reference to the model saved
264
+ self.my_model = current.model.save(
265
+ path_to_my_model,
266
+ label="my_model",
267
+ metadata={
268
+ "epochs": 10,
269
+ "batch-size": 32,
270
+ "learning-rate": 0.001,
271
+ }
272
+ )
273
+ self.next(self.test)
220
274
 
221
- You can choose the compute layer on the command line by executing e.g.
222
- ```
223
- python myflow.py run --with batch
224
- ```
225
- or
275
+ @model(load="my_model")
276
+ @step
277
+ def test(self):
278
+ # `current.model.loaded` returns a dictionary of the loaded models
279
+ # where the key is the name of the artifact and the value is the path to the model
280
+ print(os.listdir(current.model.loaded["my_model"]))
281
+ self.next(self.end)
226
282
  ```
227
- python myflow.py run --with kubernetes
283
+
284
+ - Loading models
285
+ ```python
286
+ @step
287
+ def train(self):
288
+ # current.model.load returns the path to the model loaded
289
+ checkpoint_path = current.model.load(
290
+ self.checkpoint_key,
291
+ )
292
+ model_path = current.model.load(
293
+ self.model,
294
+ )
295
+ self.next(self.test)
228
296
  ```
229
- which executes the flow on the desired system using the
230
- requirements specified in `@resources`.
231
297
 
232
298
 
233
299
  Parameters
234
300
  ----------
235
- cpu : int, default 1
236
- Number of CPUs required for this step.
237
- gpu : int, optional, default None
238
- Number of GPUs required for this step.
239
- disk : int, optional, default None
240
- Disk size (in MB) required for this step. Only applies on Kubernetes.
241
- memory : int, default 4096
242
- Memory size (in MB) required for this step.
243
- shared_memory : int, optional, default None
244
- The value for the size (in MiB) of the /dev/shm volume for this step.
245
- This parameter maps to the `--shm-size` option in Docker.
301
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
302
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
303
+ The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
304
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
305
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
306
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
307
+
308
+ temp_dir_root : str, default: None
309
+ The root directory under which `current.model.loaded` will store loaded models
246
310
  """
247
311
  ...
248
312
 
@@ -266,185 +330,133 @@ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag]
266
330
  ...
267
331
 
268
332
  @typing.overload
269
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
333
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
270
334
  """
271
- Specifies environment variables to be set prior to the execution of a step.
272
-
273
-
274
- Parameters
275
- ----------
276
- vars : Dict[str, str], default {}
277
- Dictionary of environment variables to set.
335
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
336
+ It exists to make it easier for users to know that this decorator should only be used with
337
+ a Neo Cloud like Nebius.
278
338
  """
279
339
  ...
280
340
 
281
341
  @typing.overload
282
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
283
- ...
284
-
285
- @typing.overload
286
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
342
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
287
343
  ...
288
344
 
289
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
345
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
290
346
  """
291
- Specifies environment variables to be set prior to the execution of a step.
292
-
293
-
294
- Parameters
295
- ----------
296
- vars : Dict[str, str], default {}
297
- Dictionary of environment variables to set.
347
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
348
+ It exists to make it easier for users to know that this decorator should only be used with
349
+ a Neo Cloud like Nebius.
298
350
  """
299
351
  ...
300
352
 
301
353
  @typing.overload
302
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
354
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
303
355
  """
304
- Specifies the Conda environment for the step.
356
+ Specifies the PyPI packages for the step.
305
357
 
306
358
  Information in this decorator will augment any
307
- attributes set in the `@conda_base` flow-level decorator. Hence,
308
- you can use `@conda_base` to set packages required by all
309
- steps and use `@conda` to specify step-specific overrides.
359
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
360
+ you can use `@pypi_base` to set packages required by all
361
+ steps and use `@pypi` to specify step-specific overrides.
310
362
 
311
363
 
312
364
  Parameters
313
365
  ----------
314
- packages : Dict[str, str], default {}
366
+ packages : Dict[str, str], default: {}
315
367
  Packages to use for this step. The key is the name of the package
316
368
  and the value is the version to use.
317
- libraries : Dict[str, str], default {}
318
- Supported for backward compatibility. When used with packages, packages will take precedence.
319
- python : str, optional, default None
369
+ python : str, optional, default: None
320
370
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
321
371
  that the version used will correspond to the version of the Python interpreter used to start the run.
322
- disabled : bool, default False
323
- If set to True, disables @conda.
324
372
  """
325
373
  ...
326
374
 
327
375
  @typing.overload
328
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
376
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
329
377
  ...
330
378
 
331
379
  @typing.overload
332
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
380
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
333
381
  ...
334
382
 
335
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
383
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
336
384
  """
337
- Specifies the Conda environment for the step.
385
+ Specifies the PyPI packages for the step.
338
386
 
339
387
  Information in this decorator will augment any
340
- attributes set in the `@conda_base` flow-level decorator. Hence,
341
- you can use `@conda_base` to set packages required by all
342
- steps and use `@conda` to specify step-specific overrides.
388
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
389
+ you can use `@pypi_base` to set packages required by all
390
+ steps and use `@pypi` to specify step-specific overrides.
343
391
 
344
392
 
345
393
  Parameters
346
394
  ----------
347
- packages : Dict[str, str], default {}
395
+ packages : Dict[str, str], default: {}
348
396
  Packages to use for this step. The key is the name of the package
349
397
  and the value is the version to use.
350
- libraries : Dict[str, str], default {}
351
- Supported for backward compatibility. When used with packages, packages will take precedence.
352
- python : str, optional, default None
398
+ python : str, optional, default: None
353
399
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
354
400
  that the version used will correspond to the version of the Python interpreter used to start the run.
355
- disabled : bool, default False
356
- If set to True, disables @conda.
357
401
  """
358
402
  ...
359
403
 
360
404
  @typing.overload
361
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
405
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
362
406
  """
363
- Specifies the number of times the task corresponding
364
- to a step needs to be retried.
407
+ Specifies a timeout for your step.
365
408
 
366
- This decorator is useful for handling transient errors, such as networking issues.
367
- If your task contains operations that can't be retried safely, e.g. database updates,
368
- it is advisable to annotate it with `@retry(times=0)`.
409
+ This decorator is useful if this step may hang indefinitely.
369
410
 
370
- This can be used in conjunction with the `@catch` decorator. The `@catch`
371
- decorator will execute a no-op task after all retries have been exhausted,
372
- ensuring that the flow execution can continue.
411
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
412
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
413
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
414
+
415
+ Note that all the values specified in parameters are added together so if you specify
416
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
373
417
 
374
418
 
375
419
  Parameters
376
420
  ----------
377
- times : int, default 3
378
- Number of times to retry this task.
379
- minutes_between_retries : int, default 2
380
- Number of minutes between retries.
421
+ seconds : int, default 0
422
+ Number of seconds to wait prior to timing out.
423
+ minutes : int, default 0
424
+ Number of minutes to wait prior to timing out.
425
+ hours : int, default 0
426
+ Number of hours to wait prior to timing out.
381
427
  """
382
428
  ...
383
429
 
384
430
  @typing.overload
385
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
431
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
386
432
  ...
387
433
 
388
434
  @typing.overload
389
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
435
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
390
436
  ...
391
437
 
392
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
438
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
393
439
  """
394
- Specifies the number of times the task corresponding
395
- to a step needs to be retried.
396
-
397
- This decorator is useful for handling transient errors, such as networking issues.
398
- If your task contains operations that can't be retried safely, e.g. database updates,
399
- it is advisable to annotate it with `@retry(times=0)`.
400
-
401
- This can be used in conjunction with the `@catch` decorator. The `@catch`
402
- decorator will execute a no-op task after all retries have been exhausted,
403
- ensuring that the flow execution can continue.
404
-
440
+ Specifies a timeout for your step.
405
441
 
406
- Parameters
407
- ----------
408
- times : int, default 3
409
- Number of times to retry this task.
410
- minutes_between_retries : int, default 2
411
- Number of minutes between retries.
412
- """
413
- ...
414
-
415
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
416
- """
417
- Specifies that this step should execute on DGX cloud.
442
+ This decorator is useful if this step may hang indefinitely.
418
443
 
444
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
445
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
446
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
419
447
 
420
- Parameters
421
- ----------
422
- gpu : int
423
- Number of GPUs to use.
424
- gpu_type : str
425
- Type of Nvidia GPU to use.
426
- """
427
- ...
428
-
429
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
430
- """
431
- S3 Proxy decorator for routing S3 requests through a local proxy service.
448
+ Note that all the values specified in parameters are added together so if you specify
449
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
432
450
 
433
451
 
434
452
  Parameters
435
453
  ----------
436
- integration_name : str, optional
437
- Name of the S3 proxy integration. If not specified, will use the only
438
- available S3 proxy integration in the namespace (fails if multiple exist).
439
- write_mode : str, optional
440
- The desired behavior during write operations to target (origin) S3 bucket.
441
- allowed options are:
442
- "origin-and-cache" -> write to both the target S3 bucket and local object
443
- storage
444
- "origin" -> only write to the target S3 bucket
445
- "cache" -> only write to the object storage service used for caching
446
- debug : bool, optional
447
- Enable debug logging for proxy operations.
454
+ seconds : int, default 0
455
+ Number of seconds to wait prior to timing out.
456
+ minutes : int, default 0
457
+ Number of minutes to wait prior to timing out.
458
+ hours : int, default 0
459
+ Number of hours to wait prior to timing out.
448
460
  """
449
461
  ...
450
462
 
@@ -491,58 +503,189 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
491
503
  """
492
504
  ...
493
505
 
506
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
507
+ """
508
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
509
+
510
+ > Examples
511
+
512
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
513
+ ```python
514
+ @huggingface_hub
515
+ @step
516
+ def pull_model_from_huggingface(self):
517
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
518
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
519
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
520
+ # value of the function is a reference to the model in the backend storage.
521
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
522
+
523
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
524
+ self.llama_model = current.huggingface_hub.snapshot_download(
525
+ repo_id=self.model_id,
526
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
527
+ )
528
+ self.next(self.train)
529
+ ```
530
+
531
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
532
+ ```python
533
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
534
+ @step
535
+ def pull_model_from_huggingface(self):
536
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
537
+ ```
538
+
539
+ ```python
540
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
541
+ @step
542
+ def finetune_model(self):
543
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
544
+ # path_to_model will be /my-directory
545
+ ```
546
+
547
+ ```python
548
+ # Takes all the arguments passed to `snapshot_download`
549
+ # except for `local_dir`
550
+ @huggingface_hub(load=[
551
+ {
552
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
553
+ },
554
+ {
555
+ "repo_id": "myorg/mistral-lora",
556
+ "repo_type": "model",
557
+ },
558
+ ])
559
+ @step
560
+ def finetune_model(self):
561
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
562
+ # path_to_model will be /my-directory
563
+ ```
564
+
565
+
566
+ Parameters
567
+ ----------
568
+ temp_dir_root : str, optional
569
+ The root directory that will hold the temporary directory where objects will be downloaded.
570
+
571
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
572
+ The list of repos (models/datasets) to load.
573
+
574
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
575
+
576
+ - If repo (model/dataset) is not found in the datastore:
577
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
578
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
579
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
580
+
581
+ - If repo is found in the datastore:
582
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
583
+ """
584
+ ...
585
+
494
586
  @typing.overload
495
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
587
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
496
588
  """
497
- Specifies secrets to be retrieved and injected as environment variables prior to
498
- the execution of a step.
589
+ Specifies environment variables to be set prior to the execution of a step.
499
590
 
500
591
 
501
592
  Parameters
502
593
  ----------
503
- sources : List[Union[str, Dict[str, Any]]], default: []
504
- List of secret specs, defining how the secrets are to be retrieved
505
- role : str, optional, default: None
506
- Role to use for fetching secrets
594
+ vars : Dict[str, str], default {}
595
+ Dictionary of environment variables to set.
507
596
  """
508
597
  ...
509
598
 
510
599
  @typing.overload
511
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
600
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
512
601
  ...
513
602
 
514
603
  @typing.overload
515
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
604
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
516
605
  ...
517
606
 
518
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
607
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
519
608
  """
520
- Specifies secrets to be retrieved and injected as environment variables prior to
521
- the execution of a step.
609
+ Specifies environment variables to be set prior to the execution of a step.
522
610
 
523
611
 
524
612
  Parameters
525
613
  ----------
526
- sources : List[Union[str, Dict[str, Any]]], default: []
527
- List of secret specs, defining how the secrets are to be retrieved
528
- role : str, optional, default: None
529
- Role to use for fetching secrets
614
+ vars : Dict[str, str], default {}
615
+ Dictionary of environment variables to set.
530
616
  """
531
617
  ...
532
618
 
533
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
619
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
534
620
  """
535
- Specifies that this step should execute on DGX cloud.
621
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
536
622
 
537
623
 
538
624
  Parameters
539
625
  ----------
540
- gpu : int
541
- Number of GPUs to use.
542
- gpu_type : str
543
- Type of Nvidia GPU to use.
544
- queue_timeout : int
545
- Time to keep the job in NVCF's queue.
626
+ integration_name : str, optional
627
+ Name of the S3 proxy integration. If not specified, will use the only
628
+ available S3 proxy integration in the namespace (fails if multiple exist).
629
+ write_mode : str, optional
630
+ The desired behavior during write operations to target (origin) S3 bucket.
631
+ allowed options are:
632
+ "origin-and-cache" -> write to both the target S3 bucket and local object
633
+ storage
634
+ "origin" -> only write to the target S3 bucket
635
+ "cache" -> only write to the object storage service used for caching
636
+ debug : bool, optional
637
+ Enable debug logging for proxy operations.
638
+ """
639
+ ...
640
+
641
+ @typing.overload
642
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
643
+ """
644
+ Specifies that the step will success under all circumstances.
645
+
646
+ The decorator will create an optional artifact, specified by `var`, which
647
+ contains the exception raised. You can use it to detect the presence
648
+ of errors, indicating that all happy-path artifacts produced by the step
649
+ are missing.
650
+
651
+
652
+ Parameters
653
+ ----------
654
+ var : str, optional, default None
655
+ Name of the artifact in which to store the caught exception.
656
+ If not specified, the exception is not stored.
657
+ print_exception : bool, default True
658
+ Determines whether or not the exception is printed to
659
+ stdout when caught.
660
+ """
661
+ ...
662
+
663
+ @typing.overload
664
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
665
+ ...
666
+
667
+ @typing.overload
668
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
669
+ ...
670
+
671
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
672
+ """
673
+ Specifies that the step will success under all circumstances.
674
+
675
+ The decorator will create an optional artifact, specified by `var`, which
676
+ contains the exception raised. You can use it to detect the presence
677
+ of errors, indicating that all happy-path artifacts produced by the step
678
+ are missing.
679
+
680
+
681
+ Parameters
682
+ ----------
683
+ var : str, optional, default None
684
+ Name of the artifact in which to store the caught exception.
685
+ If not specified, the exception is not stored.
686
+ print_exception : bool, default True
687
+ Determines whether or not the exception is printed to
688
+ stdout when caught.
546
689
  """
547
690
  ...
548
691
 
@@ -694,70 +837,266 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
694
837
  ...
695
838
 
696
839
  @typing.overload
697
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
698
- """
699
- Internal decorator to support Fast bakery
840
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
700
841
  """
701
- ...
842
+ Specifies the resources needed when executing this step.
843
+
844
+ Use `@resources` to specify the resource requirements
845
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
846
+
847
+ You can choose the compute layer on the command line by executing e.g.
848
+ ```
849
+ python myflow.py run --with batch
850
+ ```
851
+ or
852
+ ```
853
+ python myflow.py run --with kubernetes
854
+ ```
855
+ which executes the flow on the desired system using the
856
+ requirements specified in `@resources`.
857
+
858
+
859
+ Parameters
860
+ ----------
861
+ cpu : int, default 1
862
+ Number of CPUs required for this step.
863
+ gpu : int, optional, default None
864
+ Number of GPUs required for this step.
865
+ disk : int, optional, default None
866
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
867
+ memory : int, default 4096
868
+ Memory size (in MB) required for this step.
869
+ shared_memory : int, optional, default None
870
+ The value for the size (in MiB) of the /dev/shm volume for this step.
871
+ This parameter maps to the `--shm-size` option in Docker.
872
+ """
873
+ ...
702
874
 
703
875
  @typing.overload
704
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
876
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
705
877
  ...
706
878
 
707
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
879
+ @typing.overload
880
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
881
+ ...
882
+
883
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
708
884
  """
709
- Internal decorator to support Fast bakery
885
+ Specifies the resources needed when executing this step.
886
+
887
+ Use `@resources` to specify the resource requirements
888
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
889
+
890
+ You can choose the compute layer on the command line by executing e.g.
891
+ ```
892
+ python myflow.py run --with batch
893
+ ```
894
+ or
895
+ ```
896
+ python myflow.py run --with kubernetes
897
+ ```
898
+ which executes the flow on the desired system using the
899
+ requirements specified in `@resources`.
900
+
901
+
902
+ Parameters
903
+ ----------
904
+ cpu : int, default 1
905
+ Number of CPUs required for this step.
906
+ gpu : int, optional, default None
907
+ Number of GPUs required for this step.
908
+ disk : int, optional, default None
909
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
910
+ memory : int, default 4096
911
+ Memory size (in MB) required for this step.
912
+ shared_memory : int, optional, default None
913
+ The value for the size (in MiB) of the /dev/shm volume for this step.
914
+ This parameter maps to the `--shm-size` option in Docker.
915
+ """
916
+ ...
917
+
918
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
919
+ """
920
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
921
+
922
+ User code call
923
+ --------------
924
+ @vllm(
925
+ model="...",
926
+ ...
927
+ )
928
+
929
+ Valid backend options
930
+ ---------------------
931
+ - 'local': Run as a separate process on the local task machine.
932
+
933
+ Valid model options
934
+ -------------------
935
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
936
+
937
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
938
+ If you need multiple models, you must create multiple @vllm decorators.
939
+
940
+
941
+ Parameters
942
+ ----------
943
+ model: str
944
+ HuggingFace model identifier to be served by vLLM.
945
+ backend: str
946
+ Determines where and how to run the vLLM process.
947
+ openai_api_server: bool
948
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
949
+ Default is False (uses native engine).
950
+ Set to True for backward compatibility with existing code.
951
+ debug: bool
952
+ Whether to turn on verbose debugging logs.
953
+ card_refresh_interval: int
954
+ Interval in seconds for refreshing the vLLM status card.
955
+ Only used when openai_api_server=True.
956
+ max_retries: int
957
+ Maximum number of retries checking for vLLM server startup.
958
+ Only used when openai_api_server=True.
959
+ retry_alert_frequency: int
960
+ Frequency of alert logs for vLLM server startup retries.
961
+ Only used when openai_api_server=True.
962
+ engine_args : dict
963
+ Additional keyword arguments to pass to the vLLM engine.
964
+ For example, `tensor_parallel_size=2`.
710
965
  """
711
966
  ...
712
967
 
713
968
  @typing.overload
714
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
969
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
715
970
  """
716
- Specifies the PyPI packages for the step.
971
+ Specifies secrets to be retrieved and injected as environment variables prior to
972
+ the execution of a step.
973
+
974
+
975
+ Parameters
976
+ ----------
977
+ sources : List[Union[str, Dict[str, Any]]], default: []
978
+ List of secret specs, defining how the secrets are to be retrieved
979
+ role : str, optional, default: None
980
+ Role to use for fetching secrets
981
+ """
982
+ ...
983
+
984
+ @typing.overload
985
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
986
+ ...
987
+
988
+ @typing.overload
989
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
990
+ ...
991
+
992
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
993
+ """
994
+ Specifies secrets to be retrieved and injected as environment variables prior to
995
+ the execution of a step.
996
+
997
+
998
+ Parameters
999
+ ----------
1000
+ sources : List[Union[str, Dict[str, Any]]], default: []
1001
+ List of secret specs, defining how the secrets are to be retrieved
1002
+ role : str, optional, default: None
1003
+ Role to use for fetching secrets
1004
+ """
1005
+ ...
1006
+
1007
+ @typing.overload
1008
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1009
+ """
1010
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1011
+ It exists to make it easier for users to know that this decorator should only be used with
1012
+ a Neo Cloud like CoreWeave.
1013
+ """
1014
+ ...
1015
+
1016
+ @typing.overload
1017
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1018
+ ...
1019
+
1020
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1021
+ """
1022
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1023
+ It exists to make it easier for users to know that this decorator should only be used with
1024
+ a Neo Cloud like CoreWeave.
1025
+ """
1026
+ ...
1027
+
1028
+ @typing.overload
1029
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1030
+ """
1031
+ Specifies the Conda environment for the step.
717
1032
 
718
1033
  Information in this decorator will augment any
719
- attributes set in the `@pyi_base` flow-level decorator. Hence,
720
- you can use `@pypi_base` to set packages required by all
721
- steps and use `@pypi` to specify step-specific overrides.
1034
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1035
+ you can use `@conda_base` to set packages required by all
1036
+ steps and use `@conda` to specify step-specific overrides.
722
1037
 
723
1038
 
724
1039
  Parameters
725
1040
  ----------
726
- packages : Dict[str, str], default: {}
1041
+ packages : Dict[str, str], default {}
727
1042
  Packages to use for this step. The key is the name of the package
728
1043
  and the value is the version to use.
729
- python : str, optional, default: None
1044
+ libraries : Dict[str, str], default {}
1045
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1046
+ python : str, optional, default None
730
1047
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
731
1048
  that the version used will correspond to the version of the Python interpreter used to start the run.
1049
+ disabled : bool, default False
1050
+ If set to True, disables @conda.
732
1051
  """
733
1052
  ...
734
1053
 
735
1054
  @typing.overload
736
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1055
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
737
1056
  ...
738
1057
 
739
1058
  @typing.overload
740
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1059
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
741
1060
  ...
742
1061
 
743
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1062
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
744
1063
  """
745
- Specifies the PyPI packages for the step.
1064
+ Specifies the Conda environment for the step.
746
1065
 
747
1066
  Information in this decorator will augment any
748
- attributes set in the `@pyi_base` flow-level decorator. Hence,
749
- you can use `@pypi_base` to set packages required by all
750
- steps and use `@pypi` to specify step-specific overrides.
1067
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1068
+ you can use `@conda_base` to set packages required by all
1069
+ steps and use `@conda` to specify step-specific overrides.
751
1070
 
752
1071
 
753
1072
  Parameters
754
1073
  ----------
755
- packages : Dict[str, str], default: {}
1074
+ packages : Dict[str, str], default {}
756
1075
  Packages to use for this step. The key is the name of the package
757
1076
  and the value is the version to use.
758
- python : str, optional, default: None
1077
+ libraries : Dict[str, str], default {}
1078
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1079
+ python : str, optional, default None
759
1080
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
760
1081
  that the version used will correspond to the version of the Python interpreter used to start the run.
1082
+ disabled : bool, default False
1083
+ If set to True, disables @conda.
1084
+ """
1085
+ ...
1086
+
1087
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1088
+ """
1089
+ Specifies that this step should execute on DGX cloud.
1090
+
1091
+
1092
+ Parameters
1093
+ ----------
1094
+ gpu : int
1095
+ Number of GPUs to use.
1096
+ gpu_type : str
1097
+ Type of Nvidia GPU to use.
1098
+ queue_timeout : int
1099
+ Time to keep the job in NVCF's queue.
761
1100
  """
762
1101
  ...
763
1102
 
@@ -851,336 +1190,26 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
851
1190
  ...
852
1191
 
853
1192
  @typing.overload
854
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1193
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
855
1194
  """
856
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
857
- It exists to make it easier for users to know that this decorator should only be used with
858
- a Neo Cloud like Nebius.
1195
+ Decorator prototype for all step decorators. This function gets specialized
1196
+ and imported for all decorators types by _import_plugin_decorators().
859
1197
  """
860
1198
  ...
861
1199
 
862
1200
  @typing.overload
863
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1201
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
864
1202
  ...
865
1203
 
866
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1204
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
867
1205
  """
868
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
869
- It exists to make it easier for users to know that this decorator should only be used with
870
- a Neo Cloud like Nebius.
1206
+ Decorator prototype for all step decorators. This function gets specialized
1207
+ and imported for all decorators types by _import_plugin_decorators().
871
1208
  """
872
1209
  ...
873
1210
 
874
1211
  @typing.overload
875
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
876
- """
877
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
878
- It exists to make it easier for users to know that this decorator should only be used with
879
- a Neo Cloud like CoreWeave.
880
- """
881
- ...
882
-
883
- @typing.overload
884
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
885
- ...
886
-
887
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
888
- """
889
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
890
- It exists to make it easier for users to know that this decorator should only be used with
891
- a Neo Cloud like CoreWeave.
892
- """
893
- ...
894
-
895
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
896
- """
897
- Decorator that helps cache, version and store models/datasets from huggingface hub.
898
-
899
- > Examples
900
-
901
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
902
- ```python
903
- @huggingface_hub
904
- @step
905
- def pull_model_from_huggingface(self):
906
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
907
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
908
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
909
- # value of the function is a reference to the model in the backend storage.
910
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
911
-
912
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
913
- self.llama_model = current.huggingface_hub.snapshot_download(
914
- repo_id=self.model_id,
915
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
916
- )
917
- self.next(self.train)
918
- ```
919
-
920
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
921
- ```python
922
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
923
- @step
924
- def pull_model_from_huggingface(self):
925
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
926
- ```
927
-
928
- ```python
929
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
930
- @step
931
- def finetune_model(self):
932
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
933
- # path_to_model will be /my-directory
934
- ```
935
-
936
- ```python
937
- # Takes all the arguments passed to `snapshot_download`
938
- # except for `local_dir`
939
- @huggingface_hub(load=[
940
- {
941
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
942
- },
943
- {
944
- "repo_id": "myorg/mistral-lora",
945
- "repo_type": "model",
946
- },
947
- ])
948
- @step
949
- def finetune_model(self):
950
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
951
- # path_to_model will be /my-directory
952
- ```
953
-
954
-
955
- Parameters
956
- ----------
957
- temp_dir_root : str, optional
958
- The root directory that will hold the temporary directory where objects will be downloaded.
959
-
960
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
961
- The list of repos (models/datasets) to load.
962
-
963
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
964
-
965
- - If repo (model/dataset) is not found in the datastore:
966
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
967
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
968
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
969
-
970
- - If repo is found in the datastore:
971
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
972
- """
973
- ...
974
-
975
- @typing.overload
976
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
977
- """
978
- Enables loading / saving of models within a step.
979
-
980
- > Examples
981
- - Saving Models
982
- ```python
983
- @model
984
- @step
985
- def train(self):
986
- # current.model.save returns a dictionary reference to the model saved
987
- self.my_model = current.model.save(
988
- path_to_my_model,
989
- label="my_model",
990
- metadata={
991
- "epochs": 10,
992
- "batch-size": 32,
993
- "learning-rate": 0.001,
994
- }
995
- )
996
- self.next(self.test)
997
-
998
- @model(load="my_model")
999
- @step
1000
- def test(self):
1001
- # `current.model.loaded` returns a dictionary of the loaded models
1002
- # where the key is the name of the artifact and the value is the path to the model
1003
- print(os.listdir(current.model.loaded["my_model"]))
1004
- self.next(self.end)
1005
- ```
1006
-
1007
- - Loading models
1008
- ```python
1009
- @step
1010
- def train(self):
1011
- # current.model.load returns the path to the model loaded
1012
- checkpoint_path = current.model.load(
1013
- self.checkpoint_key,
1014
- )
1015
- model_path = current.model.load(
1016
- self.model,
1017
- )
1018
- self.next(self.test)
1019
- ```
1020
-
1021
-
1022
- Parameters
1023
- ----------
1024
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1025
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1026
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1027
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1028
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1029
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1030
-
1031
- temp_dir_root : str, default: None
1032
- The root directory under which `current.model.loaded` will store loaded models
1033
- """
1034
- ...
1035
-
1036
- @typing.overload
1037
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1038
- ...
1039
-
1040
- @typing.overload
1041
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1042
- ...
1043
-
1044
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1045
- """
1046
- Enables loading / saving of models within a step.
1047
-
1048
- > Examples
1049
- - Saving Models
1050
- ```python
1051
- @model
1052
- @step
1053
- def train(self):
1054
- # current.model.save returns a dictionary reference to the model saved
1055
- self.my_model = current.model.save(
1056
- path_to_my_model,
1057
- label="my_model",
1058
- metadata={
1059
- "epochs": 10,
1060
- "batch-size": 32,
1061
- "learning-rate": 0.001,
1062
- }
1063
- )
1064
- self.next(self.test)
1065
-
1066
- @model(load="my_model")
1067
- @step
1068
- def test(self):
1069
- # `current.model.loaded` returns a dictionary of the loaded models
1070
- # where the key is the name of the artifact and the value is the path to the model
1071
- print(os.listdir(current.model.loaded["my_model"]))
1072
- self.next(self.end)
1073
- ```
1074
-
1075
- - Loading models
1076
- ```python
1077
- @step
1078
- def train(self):
1079
- # current.model.load returns the path to the model loaded
1080
- checkpoint_path = current.model.load(
1081
- self.checkpoint_key,
1082
- )
1083
- model_path = current.model.load(
1084
- self.model,
1085
- )
1086
- self.next(self.test)
1087
- ```
1088
-
1089
-
1090
- Parameters
1091
- ----------
1092
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1093
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1094
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1095
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1096
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1097
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1098
-
1099
- temp_dir_root : str, default: None
1100
- The root directory under which `current.model.loaded` will store loaded models
1101
- """
1102
- ...
1103
-
1104
- @typing.overload
1105
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1106
- """
1107
- Decorator prototype for all step decorators. This function gets specialized
1108
- and imported for all decorators types by _import_plugin_decorators().
1109
- """
1110
- ...
1111
-
1112
- @typing.overload
1113
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1114
- ...
1115
-
1116
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1117
- """
1118
- Decorator prototype for all step decorators. This function gets specialized
1119
- and imported for all decorators types by _import_plugin_decorators().
1120
- """
1121
- ...
1122
-
1123
- @typing.overload
1124
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1125
- """
1126
- Specifies a timeout for your step.
1127
-
1128
- This decorator is useful if this step may hang indefinitely.
1129
-
1130
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1131
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1132
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1133
-
1134
- Note that all the values specified in parameters are added together so if you specify
1135
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1136
-
1137
-
1138
- Parameters
1139
- ----------
1140
- seconds : int, default 0
1141
- Number of seconds to wait prior to timing out.
1142
- minutes : int, default 0
1143
- Number of minutes to wait prior to timing out.
1144
- hours : int, default 0
1145
- Number of hours to wait prior to timing out.
1146
- """
1147
- ...
1148
-
1149
- @typing.overload
1150
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1151
- ...
1152
-
1153
- @typing.overload
1154
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1155
- ...
1156
-
1157
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1158
- """
1159
- Specifies a timeout for your step.
1160
-
1161
- This decorator is useful if this step may hang indefinitely.
1162
-
1163
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1164
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1165
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1166
-
1167
- Note that all the values specified in parameters are added together so if you specify
1168
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1169
-
1170
-
1171
- Parameters
1172
- ----------
1173
- seconds : int, default 0
1174
- Number of seconds to wait prior to timing out.
1175
- minutes : int, default 0
1176
- Number of minutes to wait prior to timing out.
1177
- hours : int, default 0
1178
- Number of hours to wait prior to timing out.
1179
- """
1180
- ...
1181
-
1182
- @typing.overload
1183
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1212
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1184
1213
  """
1185
1214
  Creates a human-readable report, a Metaflow Card, after this step completes.
1186
1215
 
@@ -1229,266 +1258,93 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1229
1258
  ...
1230
1259
 
1231
1260
  @typing.overload
1232
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1233
- """
1234
- Decorator prototype for all step decorators. This function gets specialized
1235
- and imported for all decorators types by _import_plugin_decorators().
1236
- """
1237
- ...
1238
-
1239
- @typing.overload
1240
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1241
- ...
1242
-
1243
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1244
- """
1245
- Decorator prototype for all step decorators. This function gets specialized
1246
- and imported for all decorators types by _import_plugin_decorators().
1247
- """
1248
- ...
1249
-
1250
- @typing.overload
1251
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1252
- """
1253
- Specifies that the step will success under all circumstances.
1254
-
1255
- The decorator will create an optional artifact, specified by `var`, which
1256
- contains the exception raised. You can use it to detect the presence
1257
- of errors, indicating that all happy-path artifacts produced by the step
1258
- are missing.
1259
-
1260
-
1261
- Parameters
1262
- ----------
1263
- var : str, optional, default None
1264
- Name of the artifact in which to store the caught exception.
1265
- If not specified, the exception is not stored.
1266
- print_exception : bool, default True
1267
- Determines whether or not the exception is printed to
1268
- stdout when caught.
1269
- """
1270
- ...
1271
-
1272
- @typing.overload
1273
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1274
- ...
1275
-
1276
- @typing.overload
1277
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1278
- ...
1279
-
1280
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1281
- """
1282
- Specifies that the step will success under all circumstances.
1283
-
1284
- The decorator will create an optional artifact, specified by `var`, which
1285
- contains the exception raised. You can use it to detect the presence
1286
- of errors, indicating that all happy-path artifacts produced by the step
1287
- are missing.
1288
-
1289
-
1290
- Parameters
1291
- ----------
1292
- var : str, optional, default None
1293
- Name of the artifact in which to store the caught exception.
1294
- If not specified, the exception is not stored.
1295
- print_exception : bool, default True
1296
- Determines whether or not the exception is printed to
1297
- stdout when caught.
1298
- """
1299
- ...
1300
-
1301
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
- """
1303
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1304
-
1305
- User code call
1306
- --------------
1307
- @vllm(
1308
- model="...",
1309
- ...
1310
- )
1311
-
1312
- Valid backend options
1313
- ---------------------
1314
- - 'local': Run as a separate process on the local task machine.
1315
-
1316
- Valid model options
1317
- -------------------
1318
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1319
-
1320
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
- If you need multiple models, you must create multiple @vllm decorators.
1322
-
1323
-
1324
- Parameters
1325
- ----------
1326
- model: str
1327
- HuggingFace model identifier to be served by vLLM.
1328
- backend: str
1329
- Determines where and how to run the vLLM process.
1330
- openai_api_server: bool
1331
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
- Default is False (uses native engine).
1333
- Set to True for backward compatibility with existing code.
1334
- debug: bool
1335
- Whether to turn on verbose debugging logs.
1336
- card_refresh_interval: int
1337
- Interval in seconds for refreshing the vLLM status card.
1338
- Only used when openai_api_server=True.
1339
- max_retries: int
1340
- Maximum number of retries checking for vLLM server startup.
1341
- Only used when openai_api_server=True.
1342
- retry_alert_frequency: int
1343
- Frequency of alert logs for vLLM server startup retries.
1344
- Only used when openai_api_server=True.
1345
- engine_args : dict
1346
- Additional keyword arguments to pass to the vLLM engine.
1347
- For example, `tensor_parallel_size=2`.
1261
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1262
+ """
1263
+ Internal decorator to support Fast bakery
1348
1264
  """
1349
1265
  ...
1350
1266
 
1351
1267
  @typing.overload
1352
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1268
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1269
+ ...
1270
+
1271
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1353
1272
  """
1354
- Specifies the event(s) that this flow depends on.
1355
-
1356
- ```
1357
- @trigger(event='foo')
1358
- ```
1359
- or
1360
- ```
1361
- @trigger(events=['foo', 'bar'])
1362
- ```
1273
+ Internal decorator to support Fast bakery
1274
+ """
1275
+ ...
1276
+
1277
+ @typing.overload
1278
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1279
+ """
1280
+ Specifies the number of times the task corresponding
1281
+ to a step needs to be retried.
1363
1282
 
1364
- Additionally, you can specify the parameter mappings
1365
- to map event payload to Metaflow parameters for the flow.
1366
- ```
1367
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1368
- ```
1369
- or
1370
- ```
1371
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1372
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1373
- ```
1283
+ This decorator is useful for handling transient errors, such as networking issues.
1284
+ If your task contains operations that can't be retried safely, e.g. database updates,
1285
+ it is advisable to annotate it with `@retry(times=0)`.
1374
1286
 
1375
- 'parameters' can also be a list of strings and tuples like so:
1376
- ```
1377
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1378
- ```
1379
- This is equivalent to:
1380
- ```
1381
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1382
- ```
1287
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1288
+ decorator will execute a no-op task after all retries have been exhausted,
1289
+ ensuring that the flow execution can continue.
1383
1290
 
1384
1291
 
1385
1292
  Parameters
1386
1293
  ----------
1387
- event : Union[str, Dict[str, Any]], optional, default None
1388
- Event dependency for this flow.
1389
- events : List[Union[str, Dict[str, Any]]], default []
1390
- Events dependency for this flow.
1391
- options : Dict[str, Any], default {}
1392
- Backend-specific configuration for tuning eventing behavior.
1294
+ times : int, default 3
1295
+ Number of times to retry this task.
1296
+ minutes_between_retries : int, default 2
1297
+ Number of minutes between retries.
1393
1298
  """
1394
1299
  ...
1395
1300
 
1396
1301
  @typing.overload
1397
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1302
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1398
1303
  ...
1399
1304
 
1400
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1305
+ @typing.overload
1306
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1307
+ ...
1308
+
1309
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1401
1310
  """
1402
- Specifies the event(s) that this flow depends on.
1403
-
1404
- ```
1405
- @trigger(event='foo')
1406
- ```
1407
- or
1408
- ```
1409
- @trigger(events=['foo', 'bar'])
1410
- ```
1311
+ Specifies the number of times the task corresponding
1312
+ to a step needs to be retried.
1411
1313
 
1412
- Additionally, you can specify the parameter mappings
1413
- to map event payload to Metaflow parameters for the flow.
1414
- ```
1415
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1416
- ```
1417
- or
1418
- ```
1419
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1420
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1421
- ```
1314
+ This decorator is useful for handling transient errors, such as networking issues.
1315
+ If your task contains operations that can't be retried safely, e.g. database updates,
1316
+ it is advisable to annotate it with `@retry(times=0)`.
1422
1317
 
1423
- 'parameters' can also be a list of strings and tuples like so:
1424
- ```
1425
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1426
- ```
1427
- This is equivalent to:
1428
- ```
1429
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1430
- ```
1318
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1319
+ decorator will execute a no-op task after all retries have been exhausted,
1320
+ ensuring that the flow execution can continue.
1431
1321
 
1432
1322
 
1433
1323
  Parameters
1434
1324
  ----------
1435
- event : Union[str, Dict[str, Any]], optional, default None
1436
- Event dependency for this flow.
1437
- events : List[Union[str, Dict[str, Any]]], default []
1438
- Events dependency for this flow.
1439
- options : Dict[str, Any], default {}
1440
- Backend-specific configuration for tuning eventing behavior.
1325
+ times : int, default 3
1326
+ Number of times to retry this task.
1327
+ minutes_between_retries : int, default 2
1328
+ Number of minutes between retries.
1441
1329
  """
1442
1330
  ...
1443
1331
 
1444
1332
  @typing.overload
1445
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1333
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1446
1334
  """
1447
- Specifies the times when the flow should be run when running on a
1448
- production scheduler.
1449
-
1450
-
1451
- Parameters
1452
- ----------
1453
- hourly : bool, default False
1454
- Run the workflow hourly.
1455
- daily : bool, default True
1456
- Run the workflow daily.
1457
- weekly : bool, default False
1458
- Run the workflow weekly.
1459
- cron : str, optional, default None
1460
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1461
- specified by this expression.
1462
- timezone : str, optional, default None
1463
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1464
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1335
+ Decorator prototype for all step decorators. This function gets specialized
1336
+ and imported for all decorators types by _import_plugin_decorators().
1465
1337
  """
1466
1338
  ...
1467
1339
 
1468
1340
  @typing.overload
1469
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1341
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1470
1342
  ...
1471
1343
 
1472
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1344
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1473
1345
  """
1474
- Specifies the times when the flow should be run when running on a
1475
- production scheduler.
1476
-
1477
-
1478
- Parameters
1479
- ----------
1480
- hourly : bool, default False
1481
- Run the workflow hourly.
1482
- daily : bool, default True
1483
- Run the workflow daily.
1484
- weekly : bool, default False
1485
- Run the workflow weekly.
1486
- cron : str, optional, default None
1487
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1488
- specified by this expression.
1489
- timezone : str, optional, default None
1490
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1491
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1346
+ Decorator prototype for all step decorators. This function gets specialized
1347
+ and imported for all decorators types by _import_plugin_decorators().
1492
1348
  """
1493
1349
  ...
1494
1350
 
@@ -1593,6 +1449,41 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1593
1449
  """
1594
1450
  ...
1595
1451
 
1452
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1453
+ """
1454
+ Specifies what flows belong to the same project.
1455
+
1456
+ A project-specific namespace is created for all flows that
1457
+ use the same `@project(name)`.
1458
+
1459
+
1460
+ Parameters
1461
+ ----------
1462
+ name : str
1463
+ Project name. Make sure that the name is unique amongst all
1464
+ projects that use the same production scheduler. The name may
1465
+ contain only lowercase alphanumeric characters and underscores.
1466
+
1467
+ branch : Optional[str], default None
1468
+ The branch to use. If not specified, the branch is set to
1469
+ `user.<username>` unless `production` is set to `True`. This can
1470
+ also be set on the command line using `--branch` as a top-level option.
1471
+ It is an error to specify `branch` in the decorator and on the command line.
1472
+
1473
+ production : bool, default False
1474
+ Whether or not the branch is the production branch. This can also be set on the
1475
+ command line using `--production` as a top-level option. It is an error to specify
1476
+ `production` in the decorator and on the command line.
1477
+ The project branch name will be:
1478
+ - if `branch` is specified:
1479
+ - if `production` is True: `prod.<branch>`
1480
+ - if `production` is False: `test.<branch>`
1481
+ - if `branch` is not specified:
1482
+ - if `production` is True: `prod`
1483
+ - if `production` is False: `user.<username>`
1484
+ """
1485
+ ...
1486
+
1596
1487
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1597
1488
  """
1598
1489
  Allows setting external datastores to save data for the
@@ -1673,78 +1564,37 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1673
1564
  )[0]
1674
1565
  print(latest)
1675
1566
  cp.load(
1676
- latest,
1677
- "test-checkpoints"
1678
- )
1679
-
1680
- task = Task("TorchTuneFlow/8484/train/53673")
1681
- with artifact_store_from(run=run, config={
1682
- "client_params": {
1683
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1684
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1685
- },
1686
- }):
1687
- load_model(
1688
- task.data.model_ref,
1689
- "test-models"
1690
- )
1691
- ```
1692
- Parameters:
1693
- ----------
1694
-
1695
- type: str
1696
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1697
-
1698
- config: dict or Callable
1699
- Dictionary of configuration options for the datastore. The following keys are required:
1700
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1701
- - example: 's3://bucket-name/path/to/root'
1702
- - example: 'gs://bucket-name/path/to/root'
1703
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1704
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1705
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1706
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1707
- """
1708
- ...
1709
-
1710
- @typing.overload
1711
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1712
- """
1713
- Specifies the PyPI packages for all steps of the flow.
1714
-
1715
- Use `@pypi_base` to set common packages required by all
1716
- steps and use `@pypi` to specify step-specific overrides.
1717
-
1718
- Parameters
1719
- ----------
1720
- packages : Dict[str, str], default: {}
1721
- Packages to use for this flow. The key is the name of the package
1722
- and the value is the version to use.
1723
- python : str, optional, default: None
1724
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1725
- that the version used will correspond to the version of the Python interpreter used to start the run.
1726
- """
1727
- ...
1728
-
1729
- @typing.overload
1730
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1731
- ...
1732
-
1733
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1734
- """
1735
- Specifies the PyPI packages for all steps of the flow.
1736
-
1737
- Use `@pypi_base` to set common packages required by all
1738
- steps and use `@pypi` to specify step-specific overrides.
1567
+ latest,
1568
+ "test-checkpoints"
1569
+ )
1739
1570
 
1740
- Parameters
1571
+ task = Task("TorchTuneFlow/8484/train/53673")
1572
+ with artifact_store_from(run=run, config={
1573
+ "client_params": {
1574
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1575
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1576
+ },
1577
+ }):
1578
+ load_model(
1579
+ task.data.model_ref,
1580
+ "test-models"
1581
+ )
1582
+ ```
1583
+ Parameters:
1741
1584
  ----------
1742
- packages : Dict[str, str], default: {}
1743
- Packages to use for this flow. The key is the name of the package
1744
- and the value is the version to use.
1745
- python : str, optional, default: None
1746
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1747
- that the version used will correspond to the version of the Python interpreter used to start the run.
1585
+
1586
+ type: str
1587
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1588
+
1589
+ config: dict or Callable
1590
+ Dictionary of configuration options for the datastore. The following keys are required:
1591
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1592
+ - example: 's3://bucket-name/path/to/root'
1593
+ - example: 'gs://bucket-name/path/to/root'
1594
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1595
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1596
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1597
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1748
1598
  """
1749
1599
  ...
1750
1600
 
@@ -1791,6 +1641,57 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1791
1641
  """
1792
1642
  ...
1793
1643
 
1644
+ @typing.overload
1645
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1646
+ """
1647
+ Specifies the times when the flow should be run when running on a
1648
+ production scheduler.
1649
+
1650
+
1651
+ Parameters
1652
+ ----------
1653
+ hourly : bool, default False
1654
+ Run the workflow hourly.
1655
+ daily : bool, default True
1656
+ Run the workflow daily.
1657
+ weekly : bool, default False
1658
+ Run the workflow weekly.
1659
+ cron : str, optional, default None
1660
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1661
+ specified by this expression.
1662
+ timezone : str, optional, default None
1663
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1664
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1665
+ """
1666
+ ...
1667
+
1668
+ @typing.overload
1669
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1670
+ ...
1671
+
1672
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1673
+ """
1674
+ Specifies the times when the flow should be run when running on a
1675
+ production scheduler.
1676
+
1677
+
1678
+ Parameters
1679
+ ----------
1680
+ hourly : bool, default False
1681
+ Run the workflow hourly.
1682
+ daily : bool, default True
1683
+ Run the workflow daily.
1684
+ weekly : bool, default False
1685
+ Run the workflow weekly.
1686
+ cron : str, optional, default None
1687
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1688
+ specified by this expression.
1689
+ timezone : str, optional, default None
1690
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1691
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1692
+ """
1693
+ ...
1694
+
1794
1695
  @typing.overload
1795
1696
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1796
1697
  """
@@ -1842,6 +1743,140 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1842
1743
  """
1843
1744
  ...
1844
1745
 
1746
+ @typing.overload
1747
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1748
+ """
1749
+ Specifies the event(s) that this flow depends on.
1750
+
1751
+ ```
1752
+ @trigger(event='foo')
1753
+ ```
1754
+ or
1755
+ ```
1756
+ @trigger(events=['foo', 'bar'])
1757
+ ```
1758
+
1759
+ Additionally, you can specify the parameter mappings
1760
+ to map event payload to Metaflow parameters for the flow.
1761
+ ```
1762
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1763
+ ```
1764
+ or
1765
+ ```
1766
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1767
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1768
+ ```
1769
+
1770
+ 'parameters' can also be a list of strings and tuples like so:
1771
+ ```
1772
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1773
+ ```
1774
+ This is equivalent to:
1775
+ ```
1776
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1777
+ ```
1778
+
1779
+
1780
+ Parameters
1781
+ ----------
1782
+ event : Union[str, Dict[str, Any]], optional, default None
1783
+ Event dependency for this flow.
1784
+ events : List[Union[str, Dict[str, Any]]], default []
1785
+ Events dependency for this flow.
1786
+ options : Dict[str, Any], default {}
1787
+ Backend-specific configuration for tuning eventing behavior.
1788
+ """
1789
+ ...
1790
+
1791
+ @typing.overload
1792
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1793
+ ...
1794
+
1795
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1796
+ """
1797
+ Specifies the event(s) that this flow depends on.
1798
+
1799
+ ```
1800
+ @trigger(event='foo')
1801
+ ```
1802
+ or
1803
+ ```
1804
+ @trigger(events=['foo', 'bar'])
1805
+ ```
1806
+
1807
+ Additionally, you can specify the parameter mappings
1808
+ to map event payload to Metaflow parameters for the flow.
1809
+ ```
1810
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1811
+ ```
1812
+ or
1813
+ ```
1814
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1815
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1816
+ ```
1817
+
1818
+ 'parameters' can also be a list of strings and tuples like so:
1819
+ ```
1820
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1821
+ ```
1822
+ This is equivalent to:
1823
+ ```
1824
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1825
+ ```
1826
+
1827
+
1828
+ Parameters
1829
+ ----------
1830
+ event : Union[str, Dict[str, Any]], optional, default None
1831
+ Event dependency for this flow.
1832
+ events : List[Union[str, Dict[str, Any]]], default []
1833
+ Events dependency for this flow.
1834
+ options : Dict[str, Any], default {}
1835
+ Backend-specific configuration for tuning eventing behavior.
1836
+ """
1837
+ ...
1838
+
1839
+ @typing.overload
1840
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1841
+ """
1842
+ Specifies the PyPI packages for all steps of the flow.
1843
+
1844
+ Use `@pypi_base` to set common packages required by all
1845
+ steps and use `@pypi` to specify step-specific overrides.
1846
+
1847
+ Parameters
1848
+ ----------
1849
+ packages : Dict[str, str], default: {}
1850
+ Packages to use for this flow. The key is the name of the package
1851
+ and the value is the version to use.
1852
+ python : str, optional, default: None
1853
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1854
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1855
+ """
1856
+ ...
1857
+
1858
+ @typing.overload
1859
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1860
+ ...
1861
+
1862
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1863
+ """
1864
+ Specifies the PyPI packages for all steps of the flow.
1865
+
1866
+ Use `@pypi_base` to set common packages required by all
1867
+ steps and use `@pypi` to specify step-specific overrides.
1868
+
1869
+ Parameters
1870
+ ----------
1871
+ packages : Dict[str, str], default: {}
1872
+ Packages to use for this flow. The key is the name of the package
1873
+ and the value is the version to use.
1874
+ python : str, optional, default: None
1875
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1876
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1877
+ """
1878
+ ...
1879
+
1845
1880
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1846
1881
  """
1847
1882
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1885,40 +1920,5 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1885
1920
  """
1886
1921
  ...
1887
1922
 
1888
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1889
- """
1890
- Specifies what flows belong to the same project.
1891
-
1892
- A project-specific namespace is created for all flows that
1893
- use the same `@project(name)`.
1894
-
1895
-
1896
- Parameters
1897
- ----------
1898
- name : str
1899
- Project name. Make sure that the name is unique amongst all
1900
- projects that use the same production scheduler. The name may
1901
- contain only lowercase alphanumeric characters and underscores.
1902
-
1903
- branch : Optional[str], default None
1904
- The branch to use. If not specified, the branch is set to
1905
- `user.<username>` unless `production` is set to `True`. This can
1906
- also be set on the command line using `--branch` as a top-level option.
1907
- It is an error to specify `branch` in the decorator and on the command line.
1908
-
1909
- production : bool, default False
1910
- Whether or not the branch is the production branch. This can also be set on the
1911
- command line using `--production` as a top-level option. It is an error to specify
1912
- `production` in the decorator and on the command line.
1913
- The project branch name will be:
1914
- - if `branch` is specified:
1915
- - if `production` is True: `prod.<branch>`
1916
- - if `production` is False: `test.<branch>`
1917
- - if `branch` is not specified:
1918
- - if `production` is True: `prod`
1919
- - if `production` is False: `user.<username>`
1920
- """
1921
- ...
1922
-
1923
1923
  pkg_name: str
1924
1924