ob-metaflow-stubs 6.0.10.16__py2.py3-none-any.whl → 6.0.10.17__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (266) hide show
  1. metaflow-stubs/__init__.pyi +1140 -1140
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +46 -46
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/__init__.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/cards/hf_hub_card.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  64. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  65. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  116. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  117. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  118. metaflow-stubs/multicore_utils.pyi +1 -1
  119. metaflow-stubs/ob_internal.pyi +1 -1
  120. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  121. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  122. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  123. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  124. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  125. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  126. metaflow-stubs/parameters.pyi +3 -3
  127. metaflow-stubs/plugins/__init__.pyi +13 -13
  128. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  134. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  135. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  137. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  138. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  139. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  140. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  141. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  142. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  143. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  145. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  148. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  149. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  150. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  152. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  157. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  158. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  159. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  161. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  162. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  163. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  164. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  166. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  173. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  175. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +2 -2
  177. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  178. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  179. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  180. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  181. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  182. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  184. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  187. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  188. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  189. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  190. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  191. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  195. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  199. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  200. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  201. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  202. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  207. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  208. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  209. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  210. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  211. metaflow-stubs/plugins/optuna/__init__.pyi +1 -1
  212. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/parsers.pyi +1 -1
  214. metaflow-stubs/plugins/perimeters.pyi +1 -1
  215. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  218. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  219. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  220. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  221. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  222. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  223. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  224. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  227. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  228. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  229. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  230. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  231. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  233. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  234. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  235. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  236. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  237. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  238. metaflow-stubs/profilers/__init__.pyi +1 -1
  239. metaflow-stubs/pylint_wrapper.pyi +1 -1
  240. metaflow-stubs/runner/__init__.pyi +1 -1
  241. metaflow-stubs/runner/deployer.pyi +34 -34
  242. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  243. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  244. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  245. metaflow-stubs/runner/nbrun.pyi +1 -1
  246. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  247. metaflow-stubs/runner/utils.pyi +3 -3
  248. metaflow-stubs/system/__init__.pyi +1 -1
  249. metaflow-stubs/system/system_logger.pyi +2 -2
  250. metaflow-stubs/system/system_monitor.pyi +1 -1
  251. metaflow-stubs/tagging_util.pyi +1 -1
  252. metaflow-stubs/tuple_util.pyi +1 -1
  253. metaflow-stubs/user_configs/__init__.pyi +1 -1
  254. metaflow-stubs/user_configs/config_options.pyi +2 -2
  255. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  256. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  257. metaflow-stubs/user_decorators/common.pyi +1 -1
  258. metaflow-stubs/user_decorators/mutable_flow.pyi +3 -3
  259. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  260. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  261. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  262. {ob_metaflow_stubs-6.0.10.16.dist-info → ob_metaflow_stubs-6.0.10.17.dist-info}/METADATA +1 -1
  263. ob_metaflow_stubs-6.0.10.17.dist-info/RECORD +266 -0
  264. ob_metaflow_stubs-6.0.10.16.dist-info/RECORD +0 -266
  265. {ob_metaflow_stubs-6.0.10.16.dist-info → ob_metaflow_stubs-6.0.10.17.dist-info}/WHEEL +0 -0
  266. {ob_metaflow_stubs-6.0.10.16.dist-info → ob_metaflow_stubs-6.0.10.17.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.18.11.1+obcheckpoint(0.2.8);ob(v1) #
4
- # Generated on 2025-10-13T07:07:26.927215 #
4
+ # Generated on 2025-10-13T21:06:57.979951 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -39,9 +39,9 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import cards as cards
43
- from . import tuple_util as tuple_util
44
42
  from . import metaflow_git as metaflow_git
43
+ from . import tuple_util as tuple_util
44
+ from . import cards as cards
45
45
  from . import events as events
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
@@ -49,8 +49,8 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
51
  from .plugins.parsers import yaml_parser as yaml_parser
52
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
52
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
54
54
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
@@ -169,101 +169,64 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
169
169
  """
170
170
  ...
171
171
 
172
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
172
+ @typing.overload
173
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
173
174
  """
174
- This decorator is used to run vllm APIs as Metaflow task sidecars.
175
-
176
- User code call
177
- --------------
178
- @vllm(
179
- model="...",
180
- ...
181
- )
182
-
183
- Valid backend options
184
- ---------------------
185
- - 'local': Run as a separate process on the local task machine.
186
-
187
- Valid model options
188
- -------------------
189
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
175
+ Specifies the PyPI packages for the step.
190
176
 
191
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
192
- If you need multiple models, you must create multiple @vllm decorators.
177
+ Information in this decorator will augment any
178
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
179
+ you can use `@pypi_base` to set packages required by all
180
+ steps and use `@pypi` to specify step-specific overrides.
193
181
 
194
182
 
195
183
  Parameters
196
184
  ----------
197
- model: str
198
- HuggingFace model identifier to be served by vLLM.
199
- backend: str
200
- Determines where and how to run the vLLM process.
201
- openai_api_server: bool
202
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
203
- Default is False (uses native engine).
204
- Set to True for backward compatibility with existing code.
205
- debug: bool
206
- Whether to turn on verbose debugging logs.
207
- card_refresh_interval: int
208
- Interval in seconds for refreshing the vLLM status card.
209
- Only used when openai_api_server=True.
210
- max_retries: int
211
- Maximum number of retries checking for vLLM server startup.
212
- Only used when openai_api_server=True.
213
- retry_alert_frequency: int
214
- Frequency of alert logs for vLLM server startup retries.
215
- Only used when openai_api_server=True.
216
- engine_args : dict
217
- Additional keyword arguments to pass to the vLLM engine.
218
- For example, `tensor_parallel_size=2`.
185
+ packages : Dict[str, str], default: {}
186
+ Packages to use for this step. The key is the name of the package
187
+ and the value is the version to use.
188
+ python : str, optional, default: None
189
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
190
+ that the version used will correspond to the version of the Python interpreter used to start the run.
219
191
  """
220
192
  ...
221
193
 
222
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
194
+ @typing.overload
195
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
196
+ ...
197
+
198
+ @typing.overload
199
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
200
+ ...
201
+
202
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
223
203
  """
224
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
225
-
226
- User code call
227
- --------------
228
- @ollama(
229
- models=[...],
230
- ...
231
- )
232
-
233
- Valid backend options
234
- ---------------------
235
- - 'local': Run as a separate process on the local task machine.
236
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
237
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
204
+ Specifies the PyPI packages for the step.
238
205
 
239
- Valid model options
240
- -------------------
241
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
206
+ Information in this decorator will augment any
207
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
208
+ you can use `@pypi_base` to set packages required by all
209
+ steps and use `@pypi` to specify step-specific overrides.
242
210
 
243
211
 
244
212
  Parameters
245
213
  ----------
246
- models: list[str]
247
- List of Ollama containers running models in sidecars.
248
- backend: str
249
- Determines where and how to run the Ollama process.
250
- force_pull: bool
251
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
252
- cache_update_policy: str
253
- Cache update policy: "auto", "force", or "never".
254
- force_cache_update: bool
255
- Simple override for "force" cache update policy.
256
- debug: bool
257
- Whether to turn on verbose debugging logs.
258
- circuit_breaker_config: dict
259
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
260
- timeout_config: dict
261
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
214
+ packages : Dict[str, str], default: {}
215
+ Packages to use for this step. The key is the name of the package
216
+ and the value is the version to use.
217
+ python : str, optional, default: None
218
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
219
+ that the version used will correspond to the version of the Python interpreter used to start the run.
262
220
  """
263
221
  ...
264
222
 
265
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
223
+ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
266
224
  """
225
+ `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
226
+ It exists to make it easier for users to know that this decorator should only be used with
227
+ a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
228
+
229
+
267
230
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
268
231
  for S3 read and write requests.
269
232
 
@@ -321,236 +284,365 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
321
284
  """
322
285
  ...
323
286
 
324
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
325
- """
326
- Specifies that this step should execute on Kubernetes.
327
-
328
-
329
- Parameters
330
- ----------
331
- cpu : int, default 1
332
- Number of CPUs required for this step. If `@resources` is
333
- also present, the maximum value from all decorators is used.
334
- memory : int, default 4096
335
- Memory size (in MB) required for this step. If
336
- `@resources` is also present, the maximum value from all decorators is
337
- used.
338
- disk : int, default 10240
339
- Disk size (in MB) required for this step. If
340
- `@resources` is also present, the maximum value from all decorators is
341
- used.
342
- image : str, optional, default None
343
- Docker image to use when launching on Kubernetes. If not specified, and
344
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
345
- not, a default Docker image mapping to the current version of Python is used.
346
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
347
- If given, the imagePullPolicy to be applied to the Docker image of the step.
348
- image_pull_secrets: List[str], default []
349
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
350
- Kubernetes image pull secrets to use when pulling container images
351
- in Kubernetes.
352
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
353
- Kubernetes service account to use when launching pod in Kubernetes.
354
- secrets : List[str], optional, default None
355
- Kubernetes secrets to use when launching pod in Kubernetes. These
356
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
357
- in Metaflow configuration.
358
- node_selector: Union[Dict[str,str], str], optional, default None
359
- Kubernetes node selector(s) to apply to the pod running the task.
360
- Can be passed in as a comma separated string of values e.g.
361
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
362
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
363
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
364
- Kubernetes namespace to use when launching pod in Kubernetes.
365
- gpu : int, optional, default None
366
- Number of GPUs required for this step. A value of zero implies that
367
- the scheduled node should not have GPUs.
368
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
369
- The vendor of the GPUs to be used for this step.
370
- tolerations : List[Dict[str,str]], default []
371
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
372
- Kubernetes tolerations to use when launching pod in Kubernetes.
373
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
374
- Kubernetes labels to use when launching pod in Kubernetes.
375
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
376
- Kubernetes annotations to use when launching pod in Kubernetes.
377
- use_tmpfs : bool, default False
378
- This enables an explicit tmpfs mount for this step.
379
- tmpfs_tempdir : bool, default True
380
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
381
- tmpfs_size : int, optional, default: None
382
- The value for the size (in MiB) of the tmpfs mount for this step.
383
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
384
- memory allocated for this step.
385
- tmpfs_path : str, optional, default /metaflow_temp
386
- Path to tmpfs mount for this step.
387
- persistent_volume_claims : Dict[str, str], optional, default None
388
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
389
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
390
- shared_memory: int, optional
391
- Shared memory size (in MiB) required for this step
392
- port: int, optional
393
- Port number to specify in the Kubernetes job object
394
- compute_pool : str, optional, default None
395
- Compute pool to be used for for this step.
396
- If not specified, any accessible compute pool within the perimeter is used.
397
- hostname_resolution_timeout: int, default 10 * 60
398
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
399
- Only applicable when @parallel is used.
400
- qos: str, default: Burstable
401
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
402
-
403
- security_context: Dict[str, Any], optional, default None
404
- Container security context. Applies to the task container. Allows the following keys:
405
- - privileged: bool, optional, default None
406
- - allow_privilege_escalation: bool, optional, default None
407
- - run_as_user: int, optional, default None
408
- - run_as_group: int, optional, default None
409
- - run_as_non_root: bool, optional, default None
410
- """
411
- ...
412
-
413
- @typing.overload
414
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
415
- """
416
- Decorator prototype for all step decorators. This function gets specialized
417
- and imported for all decorators types by _import_plugin_decorators().
418
- """
419
- ...
420
-
421
287
  @typing.overload
422
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
423
- ...
424
-
425
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
426
- """
427
- Decorator prototype for all step decorators. This function gets specialized
428
- and imported for all decorators types by _import_plugin_decorators().
429
- """
430
- ...
431
-
432
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
288
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
433
289
  """
434
- Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
290
+ Enables checkpointing for a step.
435
291
 
436
- Examples
437
- --------
292
+ > Examples
293
+
294
+ - Saving Checkpoints
438
295
 
439
296
  ```python
440
- # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
441
- @huggingface_hub
297
+ @checkpoint
442
298
  @step
443
- def pull_model_from_huggingface(self):
444
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
445
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
446
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
447
- # value of the function is a reference to the model in the backend storage.
448
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
449
-
450
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
451
- self.llama_model = current.huggingface_hub.snapshot_download(
452
- repo_id=self.model_id,
453
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
454
- )
455
- self.next(self.train)
456
-
457
- # **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
458
- @huggingface_hub
299
+ def train(self):
300
+ model = create_model(self.parameters, checkpoint_path = None)
301
+ for i in range(self.epochs):
302
+ # some training logic
303
+ loss = model.train(self.dataset)
304
+ if i % 10 == 0:
305
+ model.save(
306
+ current.checkpoint.directory,
307
+ )
308
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
309
+ # and returns a reference dictionary to the checkpoint saved in the datastore
310
+ self.latest_checkpoint = current.checkpoint.save(
311
+ name="epoch_checkpoint",
312
+ metadata={
313
+ "epoch": i,
314
+ "loss": loss,
315
+ }
316
+ )
317
+ ```
318
+
319
+ - Using Loaded Checkpoints
320
+
321
+ ```python
322
+ @retry(times=3)
323
+ @checkpoint
459
324
  @step
460
- def run_training(self):
461
- # Temporary directory (auto-cleaned on exit)
462
- with current.huggingface_hub.load(
463
- repo_id="google-bert/bert-base-uncased",
464
- allow_patterns=["*.bin"],
465
- ) as local_path:
466
- # Use files under local_path
467
- train_model(local_path)
325
+ def train(self):
326
+ # Assume that the task has restarted and the previous attempt of the task
327
+ # saved a checkpoint
328
+ checkpoint_path = None
329
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
330
+ print("Loaded checkpoint from the previous attempt")
331
+ checkpoint_path = current.checkpoint.directory
332
+
333
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
334
+ for i in range(self.epochs):
468
335
  ...
336
+ ```
469
337
 
470
- # **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
471
338
 
472
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
473
- @step
474
- def pull_model_from_huggingface(self):
475
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
339
+ Parameters
340
+ ----------
341
+ load_policy : str, default: "fresh"
342
+ The policy for loading the checkpoint. The following policies are supported:
343
+ - "eager": Loads the the latest available checkpoint within the namespace.
344
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
345
+ will be loaded at the start of the task.
346
+ - "none": Do not load any checkpoint
347
+ - "fresh": Loads the lastest checkpoint created within the running Task.
348
+ This mode helps loading checkpoints across various retry attempts of the same task.
349
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
350
+ created within the task will be loaded when the task is retries execution on failure.
476
351
 
477
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
352
+ temp_dir_root : str, default: None
353
+ The root directory under which `current.checkpoint.directory` will be created.
354
+ """
355
+ ...
356
+
357
+ @typing.overload
358
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
359
+ ...
360
+
361
+ @typing.overload
362
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
363
+ ...
364
+
365
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
366
+ """
367
+ Enables checkpointing for a step.
368
+
369
+ > Examples
370
+
371
+ - Saving Checkpoints
372
+
373
+ ```python
374
+ @checkpoint
478
375
  @step
479
- def finetune_model(self):
480
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
481
- # path_to_model will be /my-directory
376
+ def train(self):
377
+ model = create_model(self.parameters, checkpoint_path = None)
378
+ for i in range(self.epochs):
379
+ # some training logic
380
+ loss = model.train(self.dataset)
381
+ if i % 10 == 0:
382
+ model.save(
383
+ current.checkpoint.directory,
384
+ )
385
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
386
+ # and returns a reference dictionary to the checkpoint saved in the datastore
387
+ self.latest_checkpoint = current.checkpoint.save(
388
+ name="epoch_checkpoint",
389
+ metadata={
390
+ "epoch": i,
391
+ "loss": loss,
392
+ }
393
+ )
394
+ ```
482
395
 
396
+ - Using Loaded Checkpoints
483
397
 
484
- # Takes all the arguments passed to `snapshot_download`
485
- # except for `local_dir`
486
- @huggingface_hub(load=[
487
- {
488
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
489
- },
490
- {
491
- "repo_id": "myorg/mistral-lora",
492
- "repo_type": "model",
493
- },
494
- ])
398
+ ```python
399
+ @retry(times=3)
400
+ @checkpoint
495
401
  @step
496
- def finetune_model(self):
497
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
498
- # path_to_model will be /my-directory
402
+ def train(self):
403
+ # Assume that the task has restarted and the previous attempt of the task
404
+ # saved a checkpoint
405
+ checkpoint_path = None
406
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
407
+ print("Loaded checkpoint from the previous attempt")
408
+ checkpoint_path = current.checkpoint.directory
409
+
410
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
411
+ for i in range(self.epochs):
412
+ ...
499
413
  ```
500
414
 
501
415
 
502
416
  Parameters
503
417
  ----------
504
- temp_dir_root : str, optional
505
- The root directory that will hold the temporary directory where objects will be downloaded.
418
+ load_policy : str, default: "fresh"
419
+ The policy for loading the checkpoint. The following policies are supported:
420
+ - "eager": Loads the the latest available checkpoint within the namespace.
421
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
422
+ will be loaded at the start of the task.
423
+ - "none": Do not load any checkpoint
424
+ - "fresh": Loads the lastest checkpoint created within the running Task.
425
+ This mode helps loading checkpoints across various retry attempts of the same task.
426
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
427
+ created within the task will be loaded when the task is retries execution on failure.
506
428
 
507
- cache_scope : str, optional
508
- The scope of the cache. Can be `checkpoint` / `flow` / `global`.
509
- - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
510
- i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
511
- Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
429
+ temp_dir_root : str, default: None
430
+ The root directory under which `current.checkpoint.directory` will be created.
431
+ """
432
+ ...
433
+
434
+ @typing.overload
435
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
436
+ """
437
+ Specifies the Conda environment for the step.
512
438
 
513
- - `flow`: All repos are cached under the flow, regardless of namespace.
514
- i.e., the cached path is derived solely from the flow name.
515
- When to use this mode: (1) Multiple users are executing the same flow and want shared access to the repos cached by the decorator. (2) Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
439
+ Information in this decorator will augment any
440
+ attributes set in the `@conda_base` flow-level decorator. Hence,
441
+ you can use `@conda_base` to set packages required by all
442
+ steps and use `@conda` to specify step-specific overrides.
516
443
 
517
- - `global`: All repos are cached under a globally static path.
518
- i.e., the base path of the cache is static and all repos are stored under it.
519
- When to use this mode:
520
- - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
521
- - Each caching scope comes with its own trade-offs:
522
- - `checkpoint`:
523
- - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
524
- - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
525
- - `flow`:
526
- - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
527
- - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
528
- - It doesn't promote cache reuse across flows.
529
- - `global`:
530
- - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
531
- - It promotes cache reuse across flows.
532
- - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
533
444
 
534
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
535
- The list of repos (models/datasets) to load.
445
+ Parameters
446
+ ----------
447
+ packages : Dict[str, str], default {}
448
+ Packages to use for this step. The key is the name of the package
449
+ and the value is the version to use.
450
+ libraries : Dict[str, str], default {}
451
+ Supported for backward compatibility. When used with packages, packages will take precedence.
452
+ python : str, optional, default None
453
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
454
+ that the version used will correspond to the version of the Python interpreter used to start the run.
455
+ disabled : bool, default False
456
+ If set to True, disables @conda.
457
+ """
458
+ ...
459
+
460
+ @typing.overload
461
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
462
+ ...
463
+
464
+ @typing.overload
465
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
466
+ ...
467
+
468
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
469
+ """
470
+ Specifies the Conda environment for the step.
536
471
 
537
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
472
+ Information in this decorator will augment any
473
+ attributes set in the `@conda_base` flow-level decorator. Hence,
474
+ you can use `@conda_base` to set packages required by all
475
+ steps and use `@conda` to specify step-specific overrides.
538
476
 
539
- - If repo (model/dataset) is not found in the datastore:
540
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
541
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
542
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
543
477
 
544
- - If repo is found in the datastore:
545
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
478
+ Parameters
479
+ ----------
480
+ packages : Dict[str, str], default {}
481
+ Packages to use for this step. The key is the name of the package
482
+ and the value is the version to use.
483
+ libraries : Dict[str, str], default {}
484
+ Supported for backward compatibility. When used with packages, packages will take precedence.
485
+ python : str, optional, default None
486
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
487
+ that the version used will correspond to the version of the Python interpreter used to start the run.
488
+ disabled : bool, default False
489
+ If set to True, disables @conda.
546
490
  """
547
491
  ...
548
492
 
549
- def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
493
+ @typing.overload
494
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
550
495
  """
551
- `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
496
+ Internal decorator to support Fast bakery
497
+ """
498
+ ...
499
+
500
+ @typing.overload
501
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
502
+ ...
503
+
504
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
505
+ """
506
+ Internal decorator to support Fast bakery
507
+ """
508
+ ...
509
+
510
+ @typing.overload
511
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
512
+ """
513
+ Specifies secrets to be retrieved and injected as environment variables prior to
514
+ the execution of a step.
515
+
516
+
517
+ Parameters
518
+ ----------
519
+ sources : List[Union[str, Dict[str, Any]]], default: []
520
+ List of secret specs, defining how the secrets are to be retrieved
521
+ role : str, optional, default: None
522
+ Role to use for fetching secrets
523
+ """
524
+ ...
525
+
526
+ @typing.overload
527
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
528
+ ...
529
+
530
+ @typing.overload
531
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
532
+ ...
533
+
534
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
535
+ """
536
+ Specifies secrets to be retrieved and injected as environment variables prior to
537
+ the execution of a step.
538
+
539
+
540
+ Parameters
541
+ ----------
542
+ sources : List[Union[str, Dict[str, Any]]], default: []
543
+ List of secret specs, defining how the secrets are to be retrieved
544
+ role : str, optional, default: None
545
+ Role to use for fetching secrets
546
+ """
547
+ ...
548
+
549
+ @typing.overload
550
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
551
+ """
552
+ Specifies a timeout for your step.
553
+
554
+ This decorator is useful if this step may hang indefinitely.
555
+
556
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
557
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
558
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
559
+
560
+ Note that all the values specified in parameters are added together so if you specify
561
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
562
+
563
+
564
+ Parameters
565
+ ----------
566
+ seconds : int, default 0
567
+ Number of seconds to wait prior to timing out.
568
+ minutes : int, default 0
569
+ Number of minutes to wait prior to timing out.
570
+ hours : int, default 0
571
+ Number of hours to wait prior to timing out.
572
+ """
573
+ ...
574
+
575
+ @typing.overload
576
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
577
+ ...
578
+
579
+ @typing.overload
580
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
581
+ ...
582
+
583
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
584
+ """
585
+ Specifies a timeout for your step.
586
+
587
+ This decorator is useful if this step may hang indefinitely.
588
+
589
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
590
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
591
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
592
+
593
+ Note that all the values specified in parameters are added together so if you specify
594
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
595
+
596
+
597
+ Parameters
598
+ ----------
599
+ seconds : int, default 0
600
+ Number of seconds to wait prior to timing out.
601
+ minutes : int, default 0
602
+ Number of minutes to wait prior to timing out.
603
+ hours : int, default 0
604
+ Number of hours to wait prior to timing out.
605
+ """
606
+ ...
607
+
608
+ @typing.overload
609
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
610
+ """
611
+ Specifies environment variables to be set prior to the execution of a step.
612
+
613
+
614
+ Parameters
615
+ ----------
616
+ vars : Dict[str, str], default {}
617
+ Dictionary of environment variables to set.
618
+ """
619
+ ...
620
+
621
+ @typing.overload
622
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623
+ ...
624
+
625
+ @typing.overload
626
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
627
+ ...
628
+
629
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
630
+ """
631
+ Specifies environment variables to be set prior to the execution of a step.
632
+
633
+
634
+ Parameters
635
+ ----------
636
+ vars : Dict[str, str], default {}
637
+ Dictionary of environment variables to set.
638
+ """
639
+ ...
640
+
641
+ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
642
+ """
643
+ `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
552
644
  It exists to make it easier for users to know that this decorator should only be used with
553
- a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
645
+ a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
554
646
 
555
647
 
556
648
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
@@ -605,168 +697,24 @@ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode
605
697
  Controls whether writes also go to the external bucket.
606
698
  - `origin` (default)
607
699
  - `origin-and-cache`
608
- debug : bool, optional
609
- Enables debug logging for proxy operations.
610
- """
611
- ...
612
-
613
- @typing.overload
614
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
615
- """
616
- Internal decorator to support Fast bakery
617
- """
618
- ...
619
-
620
- @typing.overload
621
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
622
- ...
623
-
624
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
625
- """
626
- Internal decorator to support Fast bakery
627
- """
628
- ...
629
-
630
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
631
- """
632
- Specifies that this step should execute on DGX cloud.
633
-
634
-
635
- Parameters
636
- ----------
637
- gpu : int
638
- Number of GPUs to use.
639
- gpu_type : str
640
- Type of Nvidia GPU to use.
641
- """
642
- ...
643
-
644
- @typing.overload
645
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
646
- """
647
- Enables loading / saving of models within a step.
648
-
649
- > Examples
650
- - Saving Models
651
- ```python
652
- @model
653
- @step
654
- def train(self):
655
- # current.model.save returns a dictionary reference to the model saved
656
- self.my_model = current.model.save(
657
- path_to_my_model,
658
- label="my_model",
659
- metadata={
660
- "epochs": 10,
661
- "batch-size": 32,
662
- "learning-rate": 0.001,
663
- }
664
- )
665
- self.next(self.test)
666
-
667
- @model(load="my_model")
668
- @step
669
- def test(self):
670
- # `current.model.loaded` returns a dictionary of the loaded models
671
- # where the key is the name of the artifact and the value is the path to the model
672
- print(os.listdir(current.model.loaded["my_model"]))
673
- self.next(self.end)
674
- ```
675
-
676
- - Loading models
677
- ```python
678
- @step
679
- def train(self):
680
- # current.model.load returns the path to the model loaded
681
- checkpoint_path = current.model.load(
682
- self.checkpoint_key,
683
- )
684
- model_path = current.model.load(
685
- self.model,
686
- )
687
- self.next(self.test)
688
- ```
689
-
690
-
691
- Parameters
692
- ----------
693
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
694
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
695
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
696
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
697
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
698
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
699
-
700
- temp_dir_root : str, default: None
701
- The root directory under which `current.model.loaded` will store loaded models
700
+ debug : bool, optional
701
+ Enables debug logging for proxy operations.
702
702
  """
703
703
  ...
704
704
 
705
- @typing.overload
706
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
707
- ...
708
-
709
- @typing.overload
710
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
711
- ...
712
-
713
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
705
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
714
706
  """
715
- Enables loading / saving of models within a step.
716
-
717
- > Examples
718
- - Saving Models
719
- ```python
720
- @model
721
- @step
722
- def train(self):
723
- # current.model.save returns a dictionary reference to the model saved
724
- self.my_model = current.model.save(
725
- path_to_my_model,
726
- label="my_model",
727
- metadata={
728
- "epochs": 10,
729
- "batch-size": 32,
730
- "learning-rate": 0.001,
731
- }
732
- )
733
- self.next(self.test)
734
-
735
- @model(load="my_model")
736
- @step
737
- def test(self):
738
- # `current.model.loaded` returns a dictionary of the loaded models
739
- # where the key is the name of the artifact and the value is the path to the model
740
- print(os.listdir(current.model.loaded["my_model"]))
741
- self.next(self.end)
742
- ```
743
-
744
- - Loading models
745
- ```python
746
- @step
747
- def train(self):
748
- # current.model.load returns the path to the model loaded
749
- checkpoint_path = current.model.load(
750
- self.checkpoint_key,
751
- )
752
- model_path = current.model.load(
753
- self.model,
754
- )
755
- self.next(self.test)
756
- ```
707
+ Specifies that this step should execute on DGX cloud.
757
708
 
758
709
 
759
710
  Parameters
760
711
  ----------
761
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
762
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
763
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
764
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
765
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
766
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
767
-
768
- temp_dir_root : str, default: None
769
- The root directory under which `current.model.loaded` will store loaded models
712
+ gpu : int
713
+ Number of GPUs to use.
714
+ gpu_type : str
715
+ Type of Nvidia GPU to use.
716
+ queue_timeout : int
717
+ Time to keep the job in NVCF's queue.
770
718
  """
771
719
  ...
772
720
 
@@ -825,499 +773,686 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
825
773
  """
826
774
  ...
827
775
 
828
- @typing.overload
829
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
776
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
830
777
  """
831
- Creates a human-readable report, a Metaflow Card, after this step completes.
778
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
832
779
 
833
- Note that you may add multiple `@card` decorators in a step with different parameters.
780
+ User code call
781
+ --------------
782
+ @ollama(
783
+ models=[...],
784
+ ...
785
+ )
786
+
787
+ Valid backend options
788
+ ---------------------
789
+ - 'local': Run as a separate process on the local task machine.
790
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
791
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
792
+
793
+ Valid model options
794
+ -------------------
795
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
834
796
 
835
797
 
836
798
  Parameters
837
799
  ----------
838
- type : str, default 'default'
839
- Card type.
840
- id : str, optional, default None
841
- If multiple cards are present, use this id to identify this card.
842
- options : Dict[str, Any], default {}
843
- Options passed to the card. The contents depend on the card type.
844
- timeout : int, default 45
845
- Interrupt reporting if it takes more than this many seconds.
800
+ models: list[str]
801
+ List of Ollama containers running models in sidecars.
802
+ backend: str
803
+ Determines where and how to run the Ollama process.
804
+ force_pull: bool
805
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
806
+ cache_update_policy: str
807
+ Cache update policy: "auto", "force", or "never".
808
+ force_cache_update: bool
809
+ Simple override for "force" cache update policy.
810
+ debug: bool
811
+ Whether to turn on verbose debugging logs.
812
+ circuit_breaker_config: dict
813
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
814
+ timeout_config: dict
815
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
816
+ """
817
+ ...
818
+
819
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
820
+ """
821
+ Specifies that this step should execute on DGX cloud.
822
+
823
+
824
+ Parameters
825
+ ----------
826
+ gpu : int
827
+ Number of GPUs to use.
828
+ gpu_type : str
829
+ Type of Nvidia GPU to use.
846
830
  """
847
831
  ...
848
832
 
849
833
  @typing.overload
850
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
835
+ """
836
+ Decorator prototype for all step decorators. This function gets specialized
837
+ and imported for all decorators types by _import_plugin_decorators().
838
+ """
851
839
  ...
852
840
 
853
841
  @typing.overload
854
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
842
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
855
843
  ...
856
844
 
857
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
845
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
858
846
  """
859
- Creates a human-readable report, a Metaflow Card, after this step completes.
847
+ Decorator prototype for all step decorators. This function gets specialized
848
+ and imported for all decorators types by _import_plugin_decorators().
849
+ """
850
+ ...
851
+
852
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
853
+ """
854
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
860
855
 
861
- Note that you may add multiple `@card` decorators in a step with different parameters.
856
+ Examples
857
+ --------
858
+
859
+ ```python
860
+ # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
861
+ @huggingface_hub
862
+ @step
863
+ def pull_model_from_huggingface(self):
864
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
865
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
866
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
867
+ # value of the function is a reference to the model in the backend storage.
868
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
869
+
870
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
871
+ self.llama_model = current.huggingface_hub.snapshot_download(
872
+ repo_id=self.model_id,
873
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
874
+ )
875
+ self.next(self.train)
876
+
877
+ # **Usage: explicitly loading models at runtime from the Hugging Face Hub or from cache (from Metaflow's datastore)**
878
+ @huggingface_hub
879
+ @step
880
+ def run_training(self):
881
+ # Temporary directory (auto-cleaned on exit)
882
+ with current.huggingface_hub.load(
883
+ repo_id="google-bert/bert-base-uncased",
884
+ allow_patterns=["*.bin"],
885
+ ) as local_path:
886
+ # Use files under local_path
887
+ train_model(local_path)
888
+ ...
889
+
890
+ # **Usage: loading models directly from the Hugging Face Hub or from cache (from Metaflow's datastore)**
891
+
892
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
893
+ @step
894
+ def pull_model_from_huggingface(self):
895
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
896
+
897
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
898
+ @step
899
+ def finetune_model(self):
900
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
901
+ # path_to_model will be /my-directory
902
+
903
+
904
+ # Takes all the arguments passed to `snapshot_download`
905
+ # except for `local_dir`
906
+ @huggingface_hub(load=[
907
+ {
908
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
909
+ },
910
+ {
911
+ "repo_id": "myorg/mistral-lora",
912
+ "repo_type": "model",
913
+ },
914
+ ])
915
+ @step
916
+ def finetune_model(self):
917
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
918
+ # path_to_model will be /my-directory
919
+ ```
920
+
921
+
922
+ Parameters
923
+ ----------
924
+ temp_dir_root : str, optional
925
+ The root directory that will hold the temporary directory where objects will be downloaded.
926
+
927
+ cache_scope : str, optional
928
+ The scope of the cache. Can be `checkpoint` / `flow` / `global`.
929
+ - `checkpoint` (default): All repos are stored like objects saved by `@checkpoint`.
930
+ i.e., the cached path is derived from the namespace, flow, step, and Metaflow foreach iteration.
931
+ Any repo downloaded under this scope will only be retrieved from the cache when the step runs under the same namespace in the same flow (at the same foreach index).
932
+
933
+ - `flow`: All repos are cached under the flow, regardless of namespace.
934
+ i.e., the cached path is derived solely from the flow name.
935
+ When to use this mode: (1) Multiple users are executing the same flow and want shared access to the repos cached by the decorator. (2) Multiple versions of a flow are deployed, all needing access to the same repos cached by the decorator.
936
+
937
+ - `global`: All repos are cached under a globally static path.
938
+ i.e., the base path of the cache is static and all repos are stored under it.
939
+ When to use this mode:
940
+ - All repos from the Hugging Face Hub need to be shared by users across all flow executions.
941
+ - Each caching scope comes with its own trade-offs:
942
+ - `checkpoint`:
943
+ - Has explicit control over when caches are populated (controlled by the same flow that has the `@huggingface_hub` decorator) but ends up hitting the Hugging Face Hub more often if there are many users/namespaces/steps.
944
+ - Since objects are written on a `namespace/flow/step` basis, the blast radius of a bad checkpoint is limited to a particular flow in a namespace.
945
+ - `flow`:
946
+ - Has less control over when caches are populated (can be written by any execution instance of a flow from any namespace) but results in more cache hits.
947
+ - The blast radius of a bad checkpoint is limited to all runs of a particular flow.
948
+ - It doesn't promote cache reuse across flows.
949
+ - `global`:
950
+ - Has no control over when caches are populated (can be written by any flow execution) but has the highest cache hit rate.
951
+ - It promotes cache reuse across flows.
952
+ - The blast radius of a bad checkpoint spans every flow that could be using a particular repo.
953
+
954
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
955
+ The list of repos (models/datasets) to load.
956
+
957
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
862
958
 
959
+ - If repo (model/dataset) is not found in the datastore:
960
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
961
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
962
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
863
963
 
864
- Parameters
865
- ----------
866
- type : str, default 'default'
867
- Card type.
868
- id : str, optional, default None
869
- If multiple cards are present, use this id to identify this card.
870
- options : Dict[str, Any], default {}
871
- Options passed to the card. The contents depend on the card type.
872
- timeout : int, default 45
873
- Interrupt reporting if it takes more than this many seconds.
874
- """
875
- ...
876
-
877
- @typing.overload
878
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
879
- """
880
- Decorator prototype for all step decorators. This function gets specialized
881
- and imported for all decorators types by _import_plugin_decorators().
882
- """
883
- ...
884
-
885
- @typing.overload
886
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
887
- ...
888
-
889
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
890
- """
891
- Decorator prototype for all step decorators. This function gets specialized
892
- and imported for all decorators types by _import_plugin_decorators().
964
+ - If repo is found in the datastore:
965
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
893
966
  """
894
967
  ...
895
968
 
896
969
  @typing.overload
897
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
970
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
898
971
  """
899
- Enables checkpointing for a step.
900
-
901
- > Examples
972
+ Specifies the resources needed when executing this step.
902
973
 
903
- - Saving Checkpoints
974
+ Use `@resources` to specify the resource requirements
975
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
904
976
 
905
- ```python
906
- @checkpoint
907
- @step
908
- def train(self):
909
- model = create_model(self.parameters, checkpoint_path = None)
910
- for i in range(self.epochs):
911
- # some training logic
912
- loss = model.train(self.dataset)
913
- if i % 10 == 0:
914
- model.save(
915
- current.checkpoint.directory,
916
- )
917
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
918
- # and returns a reference dictionary to the checkpoint saved in the datastore
919
- self.latest_checkpoint = current.checkpoint.save(
920
- name="epoch_checkpoint",
921
- metadata={
922
- "epoch": i,
923
- "loss": loss,
924
- }
925
- )
977
+ You can choose the compute layer on the command line by executing e.g.
926
978
  ```
927
-
928
- - Using Loaded Checkpoints
929
-
930
- ```python
931
- @retry(times=3)
932
- @checkpoint
933
- @step
934
- def train(self):
935
- # Assume that the task has restarted and the previous attempt of the task
936
- # saved a checkpoint
937
- checkpoint_path = None
938
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
939
- print("Loaded checkpoint from the previous attempt")
940
- checkpoint_path = current.checkpoint.directory
941
-
942
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
943
- for i in range(self.epochs):
944
- ...
979
+ python myflow.py run --with batch
980
+ ```
981
+ or
982
+ ```
983
+ python myflow.py run --with kubernetes
945
984
  ```
985
+ which executes the flow on the desired system using the
986
+ requirements specified in `@resources`.
946
987
 
947
988
 
948
989
  Parameters
949
990
  ----------
950
- load_policy : str, default: "fresh"
951
- The policy for loading the checkpoint. The following policies are supported:
952
- - "eager": Loads the the latest available checkpoint within the namespace.
953
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
954
- will be loaded at the start of the task.
955
- - "none": Do not load any checkpoint
956
- - "fresh": Loads the lastest checkpoint created within the running Task.
957
- This mode helps loading checkpoints across various retry attempts of the same task.
958
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
959
- created within the task will be loaded when the task is retries execution on failure.
960
-
961
- temp_dir_root : str, default: None
962
- The root directory under which `current.checkpoint.directory` will be created.
991
+ cpu : int, default 1
992
+ Number of CPUs required for this step.
993
+ gpu : int, optional, default None
994
+ Number of GPUs required for this step.
995
+ disk : int, optional, default None
996
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
997
+ memory : int, default 4096
998
+ Memory size (in MB) required for this step.
999
+ shared_memory : int, optional, default None
1000
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1001
+ This parameter maps to the `--shm-size` option in Docker.
963
1002
  """
964
1003
  ...
965
1004
 
966
1005
  @typing.overload
967
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1006
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
968
1007
  ...
969
1008
 
970
1009
  @typing.overload
971
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1010
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
972
1011
  ...
973
1012
 
974
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1013
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
975
1014
  """
976
- Enables checkpointing for a step.
977
-
978
- > Examples
1015
+ Specifies the resources needed when executing this step.
979
1016
 
980
- - Saving Checkpoints
1017
+ Use `@resources` to specify the resource requirements
1018
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
981
1019
 
982
- ```python
983
- @checkpoint
984
- @step
985
- def train(self):
986
- model = create_model(self.parameters, checkpoint_path = None)
987
- for i in range(self.epochs):
988
- # some training logic
989
- loss = model.train(self.dataset)
990
- if i % 10 == 0:
991
- model.save(
992
- current.checkpoint.directory,
993
- )
994
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
995
- # and returns a reference dictionary to the checkpoint saved in the datastore
996
- self.latest_checkpoint = current.checkpoint.save(
997
- name="epoch_checkpoint",
998
- metadata={
999
- "epoch": i,
1000
- "loss": loss,
1001
- }
1002
- )
1020
+ You can choose the compute layer on the command line by executing e.g.
1003
1021
  ```
1022
+ python myflow.py run --with batch
1023
+ ```
1024
+ or
1025
+ ```
1026
+ python myflow.py run --with kubernetes
1027
+ ```
1028
+ which executes the flow on the desired system using the
1029
+ requirements specified in `@resources`.
1004
1030
 
1005
- - Using Loaded Checkpoints
1006
-
1007
- ```python
1008
- @retry(times=3)
1009
- @checkpoint
1010
- @step
1011
- def train(self):
1012
- # Assume that the task has restarted and the previous attempt of the task
1013
- # saved a checkpoint
1014
- checkpoint_path = None
1015
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1016
- print("Loaded checkpoint from the previous attempt")
1017
- checkpoint_path = current.checkpoint.directory
1018
1031
 
1019
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1020
- for i in range(self.epochs):
1021
- ...
1022
- ```
1032
+ Parameters
1033
+ ----------
1034
+ cpu : int, default 1
1035
+ Number of CPUs required for this step.
1036
+ gpu : int, optional, default None
1037
+ Number of GPUs required for this step.
1038
+ disk : int, optional, default None
1039
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1040
+ memory : int, default 4096
1041
+ Memory size (in MB) required for this step.
1042
+ shared_memory : int, optional, default None
1043
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1044
+ This parameter maps to the `--shm-size` option in Docker.
1045
+ """
1046
+ ...
1047
+
1048
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1049
+ """
1050
+ Specifies that this step should execute on Kubernetes.
1023
1051
 
1024
1052
 
1025
1053
  Parameters
1026
1054
  ----------
1027
- load_policy : str, default: "fresh"
1028
- The policy for loading the checkpoint. The following policies are supported:
1029
- - "eager": Loads the the latest available checkpoint within the namespace.
1030
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1031
- will be loaded at the start of the task.
1032
- - "none": Do not load any checkpoint
1033
- - "fresh": Loads the lastest checkpoint created within the running Task.
1034
- This mode helps loading checkpoints across various retry attempts of the same task.
1035
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1036
- created within the task will be loaded when the task is retries execution on failure.
1055
+ cpu : int, default 1
1056
+ Number of CPUs required for this step. If `@resources` is
1057
+ also present, the maximum value from all decorators is used.
1058
+ memory : int, default 4096
1059
+ Memory size (in MB) required for this step. If
1060
+ `@resources` is also present, the maximum value from all decorators is
1061
+ used.
1062
+ disk : int, default 10240
1063
+ Disk size (in MB) required for this step. If
1064
+ `@resources` is also present, the maximum value from all decorators is
1065
+ used.
1066
+ image : str, optional, default None
1067
+ Docker image to use when launching on Kubernetes. If not specified, and
1068
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1069
+ not, a default Docker image mapping to the current version of Python is used.
1070
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1071
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1072
+ image_pull_secrets: List[str], default []
1073
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1074
+ Kubernetes image pull secrets to use when pulling container images
1075
+ in Kubernetes.
1076
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1077
+ Kubernetes service account to use when launching pod in Kubernetes.
1078
+ secrets : List[str], optional, default None
1079
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1080
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1081
+ in Metaflow configuration.
1082
+ node_selector: Union[Dict[str,str], str], optional, default None
1083
+ Kubernetes node selector(s) to apply to the pod running the task.
1084
+ Can be passed in as a comma separated string of values e.g.
1085
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1086
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1087
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1088
+ Kubernetes namespace to use when launching pod in Kubernetes.
1089
+ gpu : int, optional, default None
1090
+ Number of GPUs required for this step. A value of zero implies that
1091
+ the scheduled node should not have GPUs.
1092
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1093
+ The vendor of the GPUs to be used for this step.
1094
+ tolerations : List[Dict[str,str]], default []
1095
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1096
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1097
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1098
+ Kubernetes labels to use when launching pod in Kubernetes.
1099
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1100
+ Kubernetes annotations to use when launching pod in Kubernetes.
1101
+ use_tmpfs : bool, default False
1102
+ This enables an explicit tmpfs mount for this step.
1103
+ tmpfs_tempdir : bool, default True
1104
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1105
+ tmpfs_size : int, optional, default: None
1106
+ The value for the size (in MiB) of the tmpfs mount for this step.
1107
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1108
+ memory allocated for this step.
1109
+ tmpfs_path : str, optional, default /metaflow_temp
1110
+ Path to tmpfs mount for this step.
1111
+ persistent_volume_claims : Dict[str, str], optional, default None
1112
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1113
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1114
+ shared_memory: int, optional
1115
+ Shared memory size (in MiB) required for this step
1116
+ port: int, optional
1117
+ Port number to specify in the Kubernetes job object
1118
+ compute_pool : str, optional, default None
1119
+ Compute pool to be used for for this step.
1120
+ If not specified, any accessible compute pool within the perimeter is used.
1121
+ hostname_resolution_timeout: int, default 10 * 60
1122
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1123
+ Only applicable when @parallel is used.
1124
+ qos: str, default: Burstable
1125
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1037
1126
 
1038
- temp_dir_root : str, default: None
1039
- The root directory under which `current.checkpoint.directory` will be created.
1127
+ security_context: Dict[str, Any], optional, default None
1128
+ Container security context. Applies to the task container. Allows the following keys:
1129
+ - privileged: bool, optional, default None
1130
+ - allow_privilege_escalation: bool, optional, default None
1131
+ - run_as_user: int, optional, default None
1132
+ - run_as_group: int, optional, default None
1133
+ - run_as_non_root: bool, optional, default None
1040
1134
  """
1041
1135
  ...
1042
1136
 
1043
1137
  @typing.overload
1044
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1138
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1045
1139
  """
1046
- Specifies a timeout for your step.
1047
-
1048
- This decorator is useful if this step may hang indefinitely.
1049
-
1050
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1051
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1052
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1140
+ Specifies that the step will success under all circumstances.
1053
1141
 
1054
- Note that all the values specified in parameters are added together so if you specify
1055
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1142
+ The decorator will create an optional artifact, specified by `var`, which
1143
+ contains the exception raised. You can use it to detect the presence
1144
+ of errors, indicating that all happy-path artifacts produced by the step
1145
+ are missing.
1056
1146
 
1057
1147
 
1058
1148
  Parameters
1059
1149
  ----------
1060
- seconds : int, default 0
1061
- Number of seconds to wait prior to timing out.
1062
- minutes : int, default 0
1063
- Number of minutes to wait prior to timing out.
1064
- hours : int, default 0
1065
- Number of hours to wait prior to timing out.
1150
+ var : str, optional, default None
1151
+ Name of the artifact in which to store the caught exception.
1152
+ If not specified, the exception is not stored.
1153
+ print_exception : bool, default True
1154
+ Determines whether or not the exception is printed to
1155
+ stdout when caught.
1066
1156
  """
1067
1157
  ...
1068
1158
 
1069
1159
  @typing.overload
1070
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1160
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1071
1161
  ...
1072
1162
 
1073
1163
  @typing.overload
1074
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1164
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1075
1165
  ...
1076
1166
 
1077
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1167
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1078
1168
  """
1079
- Specifies a timeout for your step.
1080
-
1081
- This decorator is useful if this step may hang indefinitely.
1082
-
1083
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1084
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1085
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1169
+ Specifies that the step will success under all circumstances.
1086
1170
 
1087
- Note that all the values specified in parameters are added together so if you specify
1088
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1171
+ The decorator will create an optional artifact, specified by `var`, which
1172
+ contains the exception raised. You can use it to detect the presence
1173
+ of errors, indicating that all happy-path artifacts produced by the step
1174
+ are missing.
1089
1175
 
1090
1176
 
1091
1177
  Parameters
1092
1178
  ----------
1093
- seconds : int, default 0
1094
- Number of seconds to wait prior to timing out.
1095
- minutes : int, default 0
1096
- Number of minutes to wait prior to timing out.
1097
- hours : int, default 0
1098
- Number of hours to wait prior to timing out.
1179
+ var : str, optional, default None
1180
+ Name of the artifact in which to store the caught exception.
1181
+ If not specified, the exception is not stored.
1182
+ print_exception : bool, default True
1183
+ Determines whether or not the exception is printed to
1184
+ stdout when caught.
1099
1185
  """
1100
1186
  ...
1101
1187
 
1102
- @typing.overload
1103
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1188
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1104
1189
  """
1105
- Specifies the Conda environment for the step.
1190
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1106
1191
 
1107
- Information in this decorator will augment any
1108
- attributes set in the `@conda_base` flow-level decorator. Hence,
1109
- you can use `@conda_base` to set packages required by all
1110
- steps and use `@conda` to specify step-specific overrides.
1192
+ User code call
1193
+ --------------
1194
+ @vllm(
1195
+ model="...",
1196
+ ...
1197
+ )
1111
1198
 
1199
+ Valid backend options
1200
+ ---------------------
1201
+ - 'local': Run as a separate process on the local task machine.
1112
1202
 
1113
- Parameters
1114
- ----------
1115
- packages : Dict[str, str], default {}
1116
- Packages to use for this step. The key is the name of the package
1117
- and the value is the version to use.
1118
- libraries : Dict[str, str], default {}
1119
- Supported for backward compatibility. When used with packages, packages will take precedence.
1120
- python : str, optional, default None
1121
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1122
- that the version used will correspond to the version of the Python interpreter used to start the run.
1123
- disabled : bool, default False
1124
- If set to True, disables @conda.
1125
- """
1126
- ...
1127
-
1128
- @typing.overload
1129
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1130
- ...
1131
-
1132
- @typing.overload
1133
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1134
- ...
1135
-
1136
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1137
- """
1138
- Specifies the Conda environment for the step.
1203
+ Valid model options
1204
+ -------------------
1205
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1139
1206
 
1140
- Information in this decorator will augment any
1141
- attributes set in the `@conda_base` flow-level decorator. Hence,
1142
- you can use `@conda_base` to set packages required by all
1143
- steps and use `@conda` to specify step-specific overrides.
1207
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1208
+ If you need multiple models, you must create multiple @vllm decorators.
1144
1209
 
1145
1210
 
1146
1211
  Parameters
1147
1212
  ----------
1148
- packages : Dict[str, str], default {}
1149
- Packages to use for this step. The key is the name of the package
1150
- and the value is the version to use.
1151
- libraries : Dict[str, str], default {}
1152
- Supported for backward compatibility. When used with packages, packages will take precedence.
1153
- python : str, optional, default None
1154
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1155
- that the version used will correspond to the version of the Python interpreter used to start the run.
1156
- disabled : bool, default False
1157
- If set to True, disables @conda.
1213
+ model: str
1214
+ HuggingFace model identifier to be served by vLLM.
1215
+ backend: str
1216
+ Determines where and how to run the vLLM process.
1217
+ openai_api_server: bool
1218
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1219
+ Default is False (uses native engine).
1220
+ Set to True for backward compatibility with existing code.
1221
+ debug: bool
1222
+ Whether to turn on verbose debugging logs.
1223
+ card_refresh_interval: int
1224
+ Interval in seconds for refreshing the vLLM status card.
1225
+ Only used when openai_api_server=True.
1226
+ max_retries: int
1227
+ Maximum number of retries checking for vLLM server startup.
1228
+ Only used when openai_api_server=True.
1229
+ retry_alert_frequency: int
1230
+ Frequency of alert logs for vLLM server startup retries.
1231
+ Only used when openai_api_server=True.
1232
+ engine_args : dict
1233
+ Additional keyword arguments to pass to the vLLM engine.
1234
+ For example, `tensor_parallel_size=2`.
1158
1235
  """
1159
1236
  ...
1160
1237
 
1161
1238
  @typing.overload
1162
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1239
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1163
1240
  """
1164
- Specifies the resources needed when executing this step.
1241
+ Enables loading / saving of models within a step.
1165
1242
 
1166
- Use `@resources` to specify the resource requirements
1167
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1243
+ > Examples
1244
+ - Saving Models
1245
+ ```python
1246
+ @model
1247
+ @step
1248
+ def train(self):
1249
+ # current.model.save returns a dictionary reference to the model saved
1250
+ self.my_model = current.model.save(
1251
+ path_to_my_model,
1252
+ label="my_model",
1253
+ metadata={
1254
+ "epochs": 10,
1255
+ "batch-size": 32,
1256
+ "learning-rate": 0.001,
1257
+ }
1258
+ )
1259
+ self.next(self.test)
1168
1260
 
1169
- You can choose the compute layer on the command line by executing e.g.
1170
- ```
1171
- python myflow.py run --with batch
1172
- ```
1173
- or
1261
+ @model(load="my_model")
1262
+ @step
1263
+ def test(self):
1264
+ # `current.model.loaded` returns a dictionary of the loaded models
1265
+ # where the key is the name of the artifact and the value is the path to the model
1266
+ print(os.listdir(current.model.loaded["my_model"]))
1267
+ self.next(self.end)
1174
1268
  ```
1175
- python myflow.py run --with kubernetes
1269
+
1270
+ - Loading models
1271
+ ```python
1272
+ @step
1273
+ def train(self):
1274
+ # current.model.load returns the path to the model loaded
1275
+ checkpoint_path = current.model.load(
1276
+ self.checkpoint_key,
1277
+ )
1278
+ model_path = current.model.load(
1279
+ self.model,
1280
+ )
1281
+ self.next(self.test)
1176
1282
  ```
1177
- which executes the flow on the desired system using the
1178
- requirements specified in `@resources`.
1179
1283
 
1180
1284
 
1181
1285
  Parameters
1182
1286
  ----------
1183
- cpu : int, default 1
1184
- Number of CPUs required for this step.
1185
- gpu : int, optional, default None
1186
- Number of GPUs required for this step.
1187
- disk : int, optional, default None
1188
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1189
- memory : int, default 4096
1190
- Memory size (in MB) required for this step.
1191
- shared_memory : int, optional, default None
1192
- The value for the size (in MiB) of the /dev/shm volume for this step.
1193
- This parameter maps to the `--shm-size` option in Docker.
1287
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1288
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1289
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1290
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1291
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1292
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1293
+
1294
+ temp_dir_root : str, default: None
1295
+ The root directory under which `current.model.loaded` will store loaded models
1194
1296
  """
1195
1297
  ...
1196
1298
 
1197
1299
  @typing.overload
1198
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1300
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1199
1301
  ...
1200
1302
 
1201
1303
  @typing.overload
1202
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1304
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1203
1305
  ...
1204
1306
 
1205
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1307
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1206
1308
  """
1207
- Specifies the resources needed when executing this step.
1309
+ Enables loading / saving of models within a step.
1208
1310
 
1209
- Use `@resources` to specify the resource requirements
1210
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1311
+ > Examples
1312
+ - Saving Models
1313
+ ```python
1314
+ @model
1315
+ @step
1316
+ def train(self):
1317
+ # current.model.save returns a dictionary reference to the model saved
1318
+ self.my_model = current.model.save(
1319
+ path_to_my_model,
1320
+ label="my_model",
1321
+ metadata={
1322
+ "epochs": 10,
1323
+ "batch-size": 32,
1324
+ "learning-rate": 0.001,
1325
+ }
1326
+ )
1327
+ self.next(self.test)
1211
1328
 
1212
- You can choose the compute layer on the command line by executing e.g.
1213
- ```
1214
- python myflow.py run --with batch
1215
- ```
1216
- or
1329
+ @model(load="my_model")
1330
+ @step
1331
+ def test(self):
1332
+ # `current.model.loaded` returns a dictionary of the loaded models
1333
+ # where the key is the name of the artifact and the value is the path to the model
1334
+ print(os.listdir(current.model.loaded["my_model"]))
1335
+ self.next(self.end)
1217
1336
  ```
1218
- python myflow.py run --with kubernetes
1337
+
1338
+ - Loading models
1339
+ ```python
1340
+ @step
1341
+ def train(self):
1342
+ # current.model.load returns the path to the model loaded
1343
+ checkpoint_path = current.model.load(
1344
+ self.checkpoint_key,
1345
+ )
1346
+ model_path = current.model.load(
1347
+ self.model,
1348
+ )
1349
+ self.next(self.test)
1219
1350
  ```
1220
- which executes the flow on the desired system using the
1221
- requirements specified in `@resources`.
1222
1351
 
1223
1352
 
1224
1353
  Parameters
1225
1354
  ----------
1226
- cpu : int, default 1
1227
- Number of CPUs required for this step.
1228
- gpu : int, optional, default None
1229
- Number of GPUs required for this step.
1230
- disk : int, optional, default None
1231
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1232
- memory : int, default 4096
1233
- Memory size (in MB) required for this step.
1234
- shared_memory : int, optional, default None
1235
- The value for the size (in MiB) of the /dev/shm volume for this step.
1236
- This parameter maps to the `--shm-size` option in Docker.
1355
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1356
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1357
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1358
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1359
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1360
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1361
+
1362
+ temp_dir_root : str, default: None
1363
+ The root directory under which `current.model.loaded` will store loaded models
1237
1364
  """
1238
1365
  ...
1239
1366
 
1240
1367
  @typing.overload
1241
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1368
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1242
1369
  """
1243
- A simple decorator that demonstrates using CardDecoratorInjector
1244
- to inject a card and render simple markdown content.
1370
+ Decorator prototype for all step decorators. This function gets specialized
1371
+ and imported for all decorators types by _import_plugin_decorators().
1245
1372
  """
1246
1373
  ...
1247
1374
 
1248
1375
  @typing.overload
1249
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1376
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1250
1377
  ...
1251
1378
 
1252
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1379
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1253
1380
  """
1254
- A simple decorator that demonstrates using CardDecoratorInjector
1255
- to inject a card and render simple markdown content.
1381
+ Decorator prototype for all step decorators. This function gets specialized
1382
+ and imported for all decorators types by _import_plugin_decorators().
1256
1383
  """
1257
1384
  ...
1258
1385
 
1259
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1386
+ @typing.overload
1387
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1260
1388
  """
1261
- Specifies that this step should execute on DGX cloud.
1389
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1390
+
1391
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1262
1392
 
1263
1393
 
1264
1394
  Parameters
1265
1395
  ----------
1266
- gpu : int
1267
- Number of GPUs to use.
1268
- gpu_type : str
1269
- Type of Nvidia GPU to use.
1270
- queue_timeout : int
1271
- Time to keep the job in NVCF's queue.
1396
+ type : str, default 'default'
1397
+ Card type.
1398
+ id : str, optional, default None
1399
+ If multiple cards are present, use this id to identify this card.
1400
+ options : Dict[str, Any], default {}
1401
+ Options passed to the card. The contents depend on the card type.
1402
+ timeout : int, default 45
1403
+ Interrupt reporting if it takes more than this many seconds.
1272
1404
  """
1273
1405
  ...
1274
1406
 
1275
1407
  @typing.overload
1276
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1408
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1409
+ ...
1410
+
1411
+ @typing.overload
1412
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1413
+ ...
1414
+
1415
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1277
1416
  """
1278
- Specifies secrets to be retrieved and injected as environment variables prior to
1279
- the execution of a step.
1417
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1418
+
1419
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1280
1420
 
1281
1421
 
1282
1422
  Parameters
1283
1423
  ----------
1284
- sources : List[Union[str, Dict[str, Any]]], default: []
1285
- List of secret specs, defining how the secrets are to be retrieved
1286
- role : str, optional, default: None
1287
- Role to use for fetching secrets
1424
+ type : str, default 'default'
1425
+ Card type.
1426
+ id : str, optional, default None
1427
+ If multiple cards are present, use this id to identify this card.
1428
+ options : Dict[str, Any], default {}
1429
+ Options passed to the card. The contents depend on the card type.
1430
+ timeout : int, default 45
1431
+ Interrupt reporting if it takes more than this many seconds.
1288
1432
  """
1289
1433
  ...
1290
1434
 
1291
1435
  @typing.overload
1292
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1436
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1437
+ """
1438
+ A simple decorator that demonstrates using CardDecoratorInjector
1439
+ to inject a card and render simple markdown content.
1440
+ """
1293
1441
  ...
1294
1442
 
1295
1443
  @typing.overload
1296
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1444
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1297
1445
  ...
1298
1446
 
1299
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1447
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1300
1448
  """
1301
- Specifies secrets to be retrieved and injected as environment variables prior to
1302
- the execution of a step.
1303
-
1304
-
1305
- Parameters
1306
- ----------
1307
- sources : List[Union[str, Dict[str, Any]]], default: []
1308
- List of secret specs, defining how the secrets are to be retrieved
1309
- role : str, optional, default: None
1310
- Role to use for fetching secrets
1449
+ A simple decorator that demonstrates using CardDecoratorInjector
1450
+ to inject a card and render simple markdown content.
1311
1451
  """
1312
1452
  ...
1313
1453
 
1314
- def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1454
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1315
1455
  """
1316
- `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1317
- It exists to make it easier for users to know that this decorator should only be used with
1318
- a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
1319
-
1320
-
1321
1456
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
1322
1457
  for S3 read and write requests.
1323
1458
 
@@ -1376,137 +1511,103 @@ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_m
1376
1511
  ...
1377
1512
 
1378
1513
  @typing.overload
1379
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1380
- """
1381
- Specifies environment variables to be set prior to the execution of a step.
1382
-
1383
-
1384
- Parameters
1385
- ----------
1386
- vars : Dict[str, str], default {}
1387
- Dictionary of environment variables to set.
1388
- """
1389
- ...
1390
-
1391
- @typing.overload
1392
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1393
- ...
1394
-
1395
- @typing.overload
1396
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1397
- ...
1398
-
1399
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1400
- """
1401
- Specifies environment variables to be set prior to the execution of a step.
1402
-
1403
-
1404
- Parameters
1405
- ----------
1406
- vars : Dict[str, str], default {}
1407
- Dictionary of environment variables to set.
1408
- """
1409
- ...
1410
-
1411
- @typing.overload
1412
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1413
- """
1414
- Specifies the PyPI packages for the step.
1415
-
1416
- Information in this decorator will augment any
1417
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1418
- you can use `@pypi_base` to set packages required by all
1419
- steps and use `@pypi` to specify step-specific overrides.
1420
-
1421
-
1422
- Parameters
1423
- ----------
1424
- packages : Dict[str, str], default: {}
1425
- Packages to use for this step. The key is the name of the package
1426
- and the value is the version to use.
1427
- python : str, optional, default: None
1428
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1429
- that the version used will correspond to the version of the Python interpreter used to start the run.
1430
- """
1431
- ...
1432
-
1433
- @typing.overload
1434
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1435
- ...
1436
-
1437
- @typing.overload
1438
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1439
- ...
1440
-
1441
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1514
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1442
1515
  """
1443
- Specifies the PyPI packages for the step.
1516
+ Specifies the flow(s) that this flow depends on.
1444
1517
 
1445
- Information in this decorator will augment any
1446
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1447
- you can use `@pypi_base` to set packages required by all
1448
- steps and use `@pypi` to specify step-specific overrides.
1518
+ ```
1519
+ @trigger_on_finish(flow='FooFlow')
1520
+ ```
1521
+ or
1522
+ ```
1523
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1524
+ ```
1525
+ This decorator respects the @project decorator and triggers the flow
1526
+ when upstream runs within the same namespace complete successfully
1449
1527
 
1528
+ Additionally, you can specify project aware upstream flow dependencies
1529
+ by specifying the fully qualified project_flow_name.
1530
+ ```
1531
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1532
+ ```
1533
+ or
1534
+ ```
1535
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1536
+ ```
1450
1537
 
1451
- Parameters
1452
- ----------
1453
- packages : Dict[str, str], default: {}
1454
- Packages to use for this step. The key is the name of the package
1455
- and the value is the version to use.
1456
- python : str, optional, default: None
1457
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1458
- that the version used will correspond to the version of the Python interpreter used to start the run.
1459
- """
1460
- ...
1461
-
1462
- @typing.overload
1463
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1464
- """
1465
- Specifies that the step will success under all circumstances.
1538
+ You can also specify just the project or project branch (other values will be
1539
+ inferred from the current project or project branch):
1540
+ ```
1541
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1542
+ ```
1466
1543
 
1467
- The decorator will create an optional artifact, specified by `var`, which
1468
- contains the exception raised. You can use it to detect the presence
1469
- of errors, indicating that all happy-path artifacts produced by the step
1470
- are missing.
1544
+ Note that `branch` is typically one of:
1545
+ - `prod`
1546
+ - `user.bob`
1547
+ - `test.my_experiment`
1548
+ - `prod.staging`
1471
1549
 
1472
1550
 
1473
1551
  Parameters
1474
1552
  ----------
1475
- var : str, optional, default None
1476
- Name of the artifact in which to store the caught exception.
1477
- If not specified, the exception is not stored.
1478
- print_exception : bool, default True
1479
- Determines whether or not the exception is printed to
1480
- stdout when caught.
1553
+ flow : Union[str, Dict[str, str]], optional, default None
1554
+ Upstream flow dependency for this flow.
1555
+ flows : List[Union[str, Dict[str, str]]], default []
1556
+ Upstream flow dependencies for this flow.
1557
+ options : Dict[str, Any], default {}
1558
+ Backend-specific configuration for tuning eventing behavior.
1481
1559
  """
1482
1560
  ...
1483
1561
 
1484
1562
  @typing.overload
1485
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1486
- ...
1487
-
1488
- @typing.overload
1489
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1563
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1490
1564
  ...
1491
1565
 
1492
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1566
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1493
1567
  """
1494
- Specifies that the step will success under all circumstances.
1568
+ Specifies the flow(s) that this flow depends on.
1495
1569
 
1496
- The decorator will create an optional artifact, specified by `var`, which
1497
- contains the exception raised. You can use it to detect the presence
1498
- of errors, indicating that all happy-path artifacts produced by the step
1499
- are missing.
1570
+ ```
1571
+ @trigger_on_finish(flow='FooFlow')
1572
+ ```
1573
+ or
1574
+ ```
1575
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1576
+ ```
1577
+ This decorator respects the @project decorator and triggers the flow
1578
+ when upstream runs within the same namespace complete successfully
1579
+
1580
+ Additionally, you can specify project aware upstream flow dependencies
1581
+ by specifying the fully qualified project_flow_name.
1582
+ ```
1583
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1584
+ ```
1585
+ or
1586
+ ```
1587
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1588
+ ```
1589
+
1590
+ You can also specify just the project or project branch (other values will be
1591
+ inferred from the current project or project branch):
1592
+ ```
1593
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1594
+ ```
1595
+
1596
+ Note that `branch` is typically one of:
1597
+ - `prod`
1598
+ - `user.bob`
1599
+ - `test.my_experiment`
1600
+ - `prod.staging`
1500
1601
 
1501
1602
 
1502
1603
  Parameters
1503
1604
  ----------
1504
- var : str, optional, default None
1505
- Name of the artifact in which to store the caught exception.
1506
- If not specified, the exception is not stored.
1507
- print_exception : bool, default True
1508
- Determines whether or not the exception is printed to
1509
- stdout when caught.
1605
+ flow : Union[str, Dict[str, str]], optional, default None
1606
+ Upstream flow dependency for this flow.
1607
+ flows : List[Union[str, Dict[str, str]]], default []
1608
+ Upstream flow dependencies for this flow.
1609
+ options : Dict[str, Any], default {}
1610
+ Backend-specific configuration for tuning eventing behavior.
1510
1611
  """
1511
1612
  ...
1512
1613
 
@@ -1624,87 +1725,96 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1624
1725
  """
1625
1726
  ...
1626
1727
 
1627
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1728
+ @typing.overload
1729
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1628
1730
  """
1629
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1630
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1731
+ Specifies the event(s) that this flow depends on.
1631
1732
 
1733
+ ```
1734
+ @trigger(event='foo')
1735
+ ```
1736
+ or
1737
+ ```
1738
+ @trigger(events=['foo', 'bar'])
1739
+ ```
1632
1740
 
1633
- Parameters
1634
- ----------
1635
- timeout : int
1636
- Time, in seconds before the task times out and fails. (Default: 3600)
1637
- poke_interval : int
1638
- Time in seconds that the job should wait in between each try. (Default: 60)
1639
- mode : str
1640
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1641
- exponential_backoff : bool
1642
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1643
- pool : str
1644
- the slot pool this task should run in,
1645
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1646
- soft_fail : bool
1647
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1648
- name : str
1649
- Name of the sensor on Airflow
1650
- description : str
1651
- Description of sensor in the Airflow UI
1652
- external_dag_id : str
1653
- The dag_id that contains the task you want to wait for.
1654
- external_task_ids : List[str]
1655
- The list of task_ids that you want to wait for.
1656
- If None (default value) the sensor waits for the DAG. (Default: None)
1657
- allowed_states : List[str]
1658
- Iterable of allowed states, (Default: ['success'])
1659
- failed_states : List[str]
1660
- Iterable of failed or dis-allowed states. (Default: None)
1661
- execution_delta : datetime.timedelta
1662
- time difference with the previous execution to look at,
1663
- the default is the same logical date as the current task or DAG. (Default: None)
1664
- check_existence: bool
1665
- Set to True to check if the external task exists or check if
1666
- the DAG to wait for exists. (Default: True)
1667
- """
1668
- ...
1669
-
1670
- @typing.overload
1671
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1672
- """
1673
- Specifies the PyPI packages for all steps of the flow.
1741
+ Additionally, you can specify the parameter mappings
1742
+ to map event payload to Metaflow parameters for the flow.
1743
+ ```
1744
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1745
+ ```
1746
+ or
1747
+ ```
1748
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1749
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1750
+ ```
1751
+
1752
+ 'parameters' can also be a list of strings and tuples like so:
1753
+ ```
1754
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1755
+ ```
1756
+ This is equivalent to:
1757
+ ```
1758
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1759
+ ```
1674
1760
 
1675
- Use `@pypi_base` to set common packages required by all
1676
- steps and use `@pypi` to specify step-specific overrides.
1677
1761
 
1678
1762
  Parameters
1679
1763
  ----------
1680
- packages : Dict[str, str], default: {}
1681
- Packages to use for this flow. The key is the name of the package
1682
- and the value is the version to use.
1683
- python : str, optional, default: None
1684
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1685
- that the version used will correspond to the version of the Python interpreter used to start the run.
1764
+ event : Union[str, Dict[str, Any]], optional, default None
1765
+ Event dependency for this flow.
1766
+ events : List[Union[str, Dict[str, Any]]], default []
1767
+ Events dependency for this flow.
1768
+ options : Dict[str, Any], default {}
1769
+ Backend-specific configuration for tuning eventing behavior.
1686
1770
  """
1687
1771
  ...
1688
1772
 
1689
1773
  @typing.overload
1690
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1774
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1691
1775
  ...
1692
1776
 
1693
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1777
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1694
1778
  """
1695
- Specifies the PyPI packages for all steps of the flow.
1779
+ Specifies the event(s) that this flow depends on.
1780
+
1781
+ ```
1782
+ @trigger(event='foo')
1783
+ ```
1784
+ or
1785
+ ```
1786
+ @trigger(events=['foo', 'bar'])
1787
+ ```
1788
+
1789
+ Additionally, you can specify the parameter mappings
1790
+ to map event payload to Metaflow parameters for the flow.
1791
+ ```
1792
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1793
+ ```
1794
+ or
1795
+ ```
1796
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1797
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1798
+ ```
1799
+
1800
+ 'parameters' can also be a list of strings and tuples like so:
1801
+ ```
1802
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1803
+ ```
1804
+ This is equivalent to:
1805
+ ```
1806
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1807
+ ```
1696
1808
 
1697
- Use `@pypi_base` to set common packages required by all
1698
- steps and use `@pypi` to specify step-specific overrides.
1699
1809
 
1700
1810
  Parameters
1701
1811
  ----------
1702
- packages : Dict[str, str], default: {}
1703
- Packages to use for this flow. The key is the name of the package
1704
- and the value is the version to use.
1705
- python : str, optional, default: None
1706
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1707
- that the version used will correspond to the version of the Python interpreter used to start the run.
1812
+ event : Union[str, Dict[str, Any]], optional, default None
1813
+ Event dependency for this flow.
1814
+ events : List[Union[str, Dict[str, Any]]], default []
1815
+ Events dependency for this flow.
1816
+ options : Dict[str, Any], default {}
1817
+ Backend-specific configuration for tuning eventing behavior.
1708
1818
  """
1709
1819
  ...
1710
1820
 
@@ -1741,48 +1851,13 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1741
1851
  When it's specified as a full s3:// url, please leave `bucket_name` as None
1742
1852
  bucket_name : str
1743
1853
  Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1744
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1745
- wildcard_match : bool
1746
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1747
- aws_conn_id : str
1748
- a reference to the s3 connection on Airflow. (Default: None)
1749
- verify : bool
1750
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1751
- """
1752
- ...
1753
-
1754
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1755
- """
1756
- Specifies what flows belong to the same project.
1757
-
1758
- A project-specific namespace is created for all flows that
1759
- use the same `@project(name)`.
1760
-
1761
-
1762
- Parameters
1763
- ----------
1764
- name : str
1765
- Project name. Make sure that the name is unique amongst all
1766
- projects that use the same production scheduler. The name may
1767
- contain only lowercase alphanumeric characters and underscores.
1768
-
1769
- branch : Optional[str], default None
1770
- The branch to use. If not specified, the branch is set to
1771
- `user.<username>` unless `production` is set to `True`. This can
1772
- also be set on the command line using `--branch` as a top-level option.
1773
- It is an error to specify `branch` in the decorator and on the command line.
1774
-
1775
- production : bool, default False
1776
- Whether or not the branch is the production branch. This can also be set on the
1777
- command line using `--production` as a top-level option. It is an error to specify
1778
- `production` in the decorator and on the command line.
1779
- The project branch name will be:
1780
- - if `branch` is specified:
1781
- - if `production` is True: `prod.<branch>`
1782
- - if `production` is False: `test.<branch>`
1783
- - if `branch` is not specified:
1784
- - if `production` is True: `prod`
1785
- - if `production` is False: `user.<username>`
1854
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1855
+ wildcard_match : bool
1856
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1857
+ aws_conn_id : str
1858
+ a reference to the s3 connection on Airflow. (Default: None)
1859
+ verify : bool
1860
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1786
1861
  """
1787
1862
  ...
1788
1863
 
@@ -1837,96 +1912,87 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1837
1912
  """
1838
1913
  ...
1839
1914
 
1840
- @typing.overload
1841
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1915
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1842
1916
  """
1843
- Specifies the event(s) that this flow depends on.
1844
-
1845
- ```
1846
- @trigger(event='foo')
1847
- ```
1848
- or
1849
- ```
1850
- @trigger(events=['foo', 'bar'])
1851
- ```
1917
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1918
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1852
1919
 
1853
- Additionally, you can specify the parameter mappings
1854
- to map event payload to Metaflow parameters for the flow.
1855
- ```
1856
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1857
- ```
1858
- or
1859
- ```
1860
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1861
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1862
- ```
1863
1920
 
1864
- 'parameters' can also be a list of strings and tuples like so:
1865
- ```
1866
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1867
- ```
1868
- This is equivalent to:
1869
- ```
1870
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1871
- ```
1921
+ Parameters
1922
+ ----------
1923
+ timeout : int
1924
+ Time, in seconds before the task times out and fails. (Default: 3600)
1925
+ poke_interval : int
1926
+ Time in seconds that the job should wait in between each try. (Default: 60)
1927
+ mode : str
1928
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1929
+ exponential_backoff : bool
1930
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1931
+ pool : str
1932
+ the slot pool this task should run in,
1933
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1934
+ soft_fail : bool
1935
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1936
+ name : str
1937
+ Name of the sensor on Airflow
1938
+ description : str
1939
+ Description of sensor in the Airflow UI
1940
+ external_dag_id : str
1941
+ The dag_id that contains the task you want to wait for.
1942
+ external_task_ids : List[str]
1943
+ The list of task_ids that you want to wait for.
1944
+ If None (default value) the sensor waits for the DAG. (Default: None)
1945
+ allowed_states : List[str]
1946
+ Iterable of allowed states, (Default: ['success'])
1947
+ failed_states : List[str]
1948
+ Iterable of failed or dis-allowed states. (Default: None)
1949
+ execution_delta : datetime.timedelta
1950
+ time difference with the previous execution to look at,
1951
+ the default is the same logical date as the current task or DAG. (Default: None)
1952
+ check_existence: bool
1953
+ Set to True to check if the external task exists or check if
1954
+ the DAG to wait for exists. (Default: True)
1955
+ """
1956
+ ...
1957
+
1958
+ @typing.overload
1959
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1960
+ """
1961
+ Specifies the PyPI packages for all steps of the flow.
1872
1962
 
1963
+ Use `@pypi_base` to set common packages required by all
1964
+ steps and use `@pypi` to specify step-specific overrides.
1873
1965
 
1874
1966
  Parameters
1875
1967
  ----------
1876
- event : Union[str, Dict[str, Any]], optional, default None
1877
- Event dependency for this flow.
1878
- events : List[Union[str, Dict[str, Any]]], default []
1879
- Events dependency for this flow.
1880
- options : Dict[str, Any], default {}
1881
- Backend-specific configuration for tuning eventing behavior.
1968
+ packages : Dict[str, str], default: {}
1969
+ Packages to use for this flow. The key is the name of the package
1970
+ and the value is the version to use.
1971
+ python : str, optional, default: None
1972
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1973
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1882
1974
  """
1883
1975
  ...
1884
1976
 
1885
1977
  @typing.overload
1886
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1978
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1887
1979
  ...
1888
1980
 
1889
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1981
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1890
1982
  """
1891
- Specifies the event(s) that this flow depends on.
1892
-
1893
- ```
1894
- @trigger(event='foo')
1895
- ```
1896
- or
1897
- ```
1898
- @trigger(events=['foo', 'bar'])
1899
- ```
1900
-
1901
- Additionally, you can specify the parameter mappings
1902
- to map event payload to Metaflow parameters for the flow.
1903
- ```
1904
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1905
- ```
1906
- or
1907
- ```
1908
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1909
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1910
- ```
1911
-
1912
- 'parameters' can also be a list of strings and tuples like so:
1913
- ```
1914
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1915
- ```
1916
- This is equivalent to:
1917
- ```
1918
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1919
- ```
1983
+ Specifies the PyPI packages for all steps of the flow.
1920
1984
 
1985
+ Use `@pypi_base` to set common packages required by all
1986
+ steps and use `@pypi` to specify step-specific overrides.
1921
1987
 
1922
1988
  Parameters
1923
1989
  ----------
1924
- event : Union[str, Dict[str, Any]], optional, default None
1925
- Event dependency for this flow.
1926
- events : List[Union[str, Dict[str, Any]]], default []
1927
- Events dependency for this flow.
1928
- options : Dict[str, Any], default {}
1929
- Backend-specific configuration for tuning eventing behavior.
1990
+ packages : Dict[str, str], default: {}
1991
+ Packages to use for this flow. The key is the name of the package
1992
+ and the value is the version to use.
1993
+ python : str, optional, default: None
1994
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1995
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1930
1996
  """
1931
1997
  ...
1932
1998
 
@@ -1981,104 +2047,38 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1981
2047
  """
1982
2048
  ...
1983
2049
 
1984
- @typing.overload
1985
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2050
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1986
2051
  """
1987
- Specifies the flow(s) that this flow depends on.
1988
-
1989
- ```
1990
- @trigger_on_finish(flow='FooFlow')
1991
- ```
1992
- or
1993
- ```
1994
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1995
- ```
1996
- This decorator respects the @project decorator and triggers the flow
1997
- when upstream runs within the same namespace complete successfully
1998
-
1999
- Additionally, you can specify project aware upstream flow dependencies
2000
- by specifying the fully qualified project_flow_name.
2001
- ```
2002
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
2003
- ```
2004
- or
2005
- ```
2006
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
2007
- ```
2008
-
2009
- You can also specify just the project or project branch (other values will be
2010
- inferred from the current project or project branch):
2011
- ```
2012
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
2013
- ```
2052
+ Specifies what flows belong to the same project.
2014
2053
 
2015
- Note that `branch` is typically one of:
2016
- - `prod`
2017
- - `user.bob`
2018
- - `test.my_experiment`
2019
- - `prod.staging`
2054
+ A project-specific namespace is created for all flows that
2055
+ use the same `@project(name)`.
2020
2056
 
2021
2057
 
2022
2058
  Parameters
2023
2059
  ----------
2024
- flow : Union[str, Dict[str, str]], optional, default None
2025
- Upstream flow dependency for this flow.
2026
- flows : List[Union[str, Dict[str, str]]], default []
2027
- Upstream flow dependencies for this flow.
2028
- options : Dict[str, Any], default {}
2029
- Backend-specific configuration for tuning eventing behavior.
2030
- """
2031
- ...
2032
-
2033
- @typing.overload
2034
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2035
- ...
2036
-
2037
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
2038
- """
2039
- Specifies the flow(s) that this flow depends on.
2040
-
2041
- ```
2042
- @trigger_on_finish(flow='FooFlow')
2043
- ```
2044
- or
2045
- ```
2046
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
2047
- ```
2048
- This decorator respects the @project decorator and triggers the flow
2049
- when upstream runs within the same namespace complete successfully
2050
-
2051
- Additionally, you can specify project aware upstream flow dependencies
2052
- by specifying the fully qualified project_flow_name.
2053
- ```
2054
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
2055
- ```
2056
- or
2057
- ```
2058
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
2059
- ```
2060
-
2061
- You can also specify just the project or project branch (other values will be
2062
- inferred from the current project or project branch):
2063
- ```
2064
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
2065
- ```
2066
-
2067
- Note that `branch` is typically one of:
2068
- - `prod`
2069
- - `user.bob`
2070
- - `test.my_experiment`
2071
- - `prod.staging`
2060
+ name : str
2061
+ Project name. Make sure that the name is unique amongst all
2062
+ projects that use the same production scheduler. The name may
2063
+ contain only lowercase alphanumeric characters and underscores.
2072
2064
 
2065
+ branch : Optional[str], default None
2066
+ The branch to use. If not specified, the branch is set to
2067
+ `user.<username>` unless `production` is set to `True`. This can
2068
+ also be set on the command line using `--branch` as a top-level option.
2069
+ It is an error to specify `branch` in the decorator and on the command line.
2073
2070
 
2074
- Parameters
2075
- ----------
2076
- flow : Union[str, Dict[str, str]], optional, default None
2077
- Upstream flow dependency for this flow.
2078
- flows : List[Union[str, Dict[str, str]]], default []
2079
- Upstream flow dependencies for this flow.
2080
- options : Dict[str, Any], default {}
2081
- Backend-specific configuration for tuning eventing behavior.
2071
+ production : bool, default False
2072
+ Whether or not the branch is the production branch. This can also be set on the
2073
+ command line using `--production` as a top-level option. It is an error to specify
2074
+ `production` in the decorator and on the command line.
2075
+ The project branch name will be:
2076
+ - if `branch` is specified:
2077
+ - if `production` is True: `prod.<branch>`
2078
+ - if `production` is False: `test.<branch>`
2079
+ - if `branch` is not specified:
2080
+ - if `production` is True: `prod`
2081
+ - if `production` is False: `user.<username>`
2082
2082
  """
2083
2083
  ...
2084
2084