ob-metaflow-stubs 6.0.9.1__py2.py3-none-any.whl → 6.0.9.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic; see the package's registry page for more details.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +967 -967
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +38 -38
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +3 -3
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  124. metaflow-stubs/parameters.pyi +3 -3
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +5 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +8 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +6 -6
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +3 -3
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  258. {ob_metaflow_stubs-6.0.9.1.dist-info → ob_metaflow_stubs-6.0.9.3.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.9.3.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.1.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.1.dist-info → ob_metaflow_stubs-6.0.9.3.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.1.dist-info → ob_metaflow_stubs-6.0.9.3.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.0.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-28T00:53:38.278497 #
+ # MF version: 2.18.1.1+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-09-02T19:19:25.341768 #
  ######################################################################################################
 
  from __future__ import annotations
@@ -40,9 +40,9 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
+ from . import events as events
  from . import cards as cards
  from . import tuple_util as tuple_util
- from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
@@ -167,100 +167,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...
 
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
- Parameters
- ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
- """
- ...
-
  @typing.overload
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -321,193 +227,119 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
321
227
  ...
322
228
 
323
229
  @typing.overload
324
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
230
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
325
231
  """
326
- Specifies environment variables to be set prior to the execution of a step.
232
+ Internal decorator to support Fast bakery
233
+ """
234
+ ...
235
+
236
+ @typing.overload
237
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
238
+ ...
239
+
240
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
241
+ """
242
+ Internal decorator to support Fast bakery
243
+ """
244
+ ...
245
+
246
+ @typing.overload
247
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
248
+ """
249
+ Specifies that the step will success under all circumstances.
250
+
251
+ The decorator will create an optional artifact, specified by `var`, which
252
+ contains the exception raised. You can use it to detect the presence
253
+ of errors, indicating that all happy-path artifacts produced by the step
254
+ are missing.
327
255
 
328
256
 
329
257
  Parameters
330
258
  ----------
331
- vars : Dict[str, str], default {}
332
- Dictionary of environment variables to set.
259
+ var : str, optional, default None
260
+ Name of the artifact in which to store the caught exception.
261
+ If not specified, the exception is not stored.
262
+ print_exception : bool, default True
263
+ Determines whether or not the exception is printed to
264
+ stdout when caught.
333
265
  """
334
266
  ...
335
267
 
336
268
  @typing.overload
337
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
269
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
338
270
  ...
339
271
 
340
272
  @typing.overload
341
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
273
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
342
274
  ...
343
275
 
344
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
276
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
345
277
  """
346
- Specifies environment variables to be set prior to the execution of a step.
278
+ Specifies that the step will success under all circumstances.
279
+
280
+ The decorator will create an optional artifact, specified by `var`, which
281
+ contains the exception raised. You can use it to detect the presence
282
+ of errors, indicating that all happy-path artifacts produced by the step
283
+ are missing.
347
284
 
348
285
 
349
286
  Parameters
350
287
  ----------
351
- vars : Dict[str, str], default {}
352
- Dictionary of environment variables to set.
288
+ var : str, optional, default None
289
+ Name of the artifact in which to store the caught exception.
290
+ If not specified, the exception is not stored.
291
+ print_exception : bool, default True
292
+ Determines whether or not the exception is printed to
293
+ stdout when caught.
353
294
  """
354
295
  ...
355
296
 
356
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
297
+ @typing.overload
298
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
357
299
  """
358
- Decorator that helps cache, version and store models/datasets from huggingface hub.
359
-
360
- > Examples
361
-
362
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
363
- ```python
364
- @huggingface_hub
365
- @step
366
- def pull_model_from_huggingface(self):
367
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
368
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
369
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
370
- # value of the function is a reference to the model in the backend storage.
371
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
372
-
373
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
374
- self.llama_model = current.huggingface_hub.snapshot_download(
375
- repo_id=self.model_id,
376
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
377
- )
378
- self.next(self.train)
379
- ```
380
-
381
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
382
- ```python
383
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
384
- @step
385
- def pull_model_from_huggingface(self):
386
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
387
- ```
388
-
389
- ```python
390
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
391
- @step
392
- def finetune_model(self):
393
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
394
- # path_to_model will be /my-directory
395
- ```
300
+ Creates a human-readable report, a Metaflow Card, after this step completes.
396
301
 
397
- ```python
398
- # Takes all the arguments passed to `snapshot_download`
399
- # except for `local_dir`
400
- @huggingface_hub(load=[
401
- {
402
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
403
- },
404
- {
405
- "repo_id": "myorg/mistral-lora",
406
- "repo_type": "model",
407
- },
408
- ])
409
- @step
410
- def finetune_model(self):
411
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
412
- # path_to_model will be /my-directory
413
- ```
302
+ Note that you may add multiple `@card` decorators in a step with different parameters.
414
303
 
415
304
 
416
305
  Parameters
417
306
  ----------
418
- temp_dir_root : str, optional
419
- The root directory that will hold the temporary directory where objects will be downloaded.
420
-
421
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
422
- The list of repos (models/datasets) to load.
423
-
424
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
425
-
426
- - If repo (model/dataset) is not found in the datastore:
427
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
428
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
429
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
430
-
431
- - If repo is found in the datastore:
432
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
307
+ type : str, default 'default'
308
+ Card type.
309
+ id : str, optional, default None
310
+ If multiple cards are present, use this id to identify this card.
311
+ options : Dict[str, Any], default {}
312
+ Options passed to the card. The contents depend on the card type.
313
+ timeout : int, default 45
314
+ Interrupt reporting if it takes more than this many seconds.
433
315
  """
434
316
  ...
435
317
 
436
318
  @typing.overload
437
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
438
- """
439
- Internal decorator to support Fast bakery
440
- """
319
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
441
320
  ...
442
321
 
443
322
  @typing.overload
444
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
445
- ...
446
-
447
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
448
- """
449
- Internal decorator to support Fast bakery
450
- """
323
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
451
324
  ...
452
325
 
453
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
326
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
454
327
  """
455
- S3 Proxy decorator for routing S3 requests through a local proxy service.
328
+ Creates a human-readable report, a Metaflow Card, after this step completes.
329
+
330
+ Note that you may add multiple `@card` decorators in a step with different parameters.
456
331
 
457
332
 
458
333
  Parameters
459
334
  ----------
460
- integration_name : str, optional
461
- Name of the S3 proxy integration. If not specified, will use the only
462
- available S3 proxy integration in the namespace (fails if multiple exist).
463
- write_mode : str, optional
464
- The desired behavior during write operations to target (origin) S3 bucket.
465
- allowed options are:
466
- "origin-and-cache" -> write to both the target S3 bucket and local object
467
- storage
468
- "origin" -> only write to the target S3 bucket
469
- "cache" -> only write to the object storage service used for caching
470
- debug : bool, optional
471
- Enable debug logging for proxy operations.
472
- """
473
- ...
474
-
475
- @typing.overload
476
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
477
- """
478
- Specifies secrets to be retrieved and injected as environment variables prior to
479
- the execution of a step.
480
-
481
-
482
- Parameters
483
- ----------
484
- sources : List[Union[str, Dict[str, Any]]], default: []
485
- List of secret specs, defining how the secrets are to be retrieved
486
- role : str, optional, default: None
487
- Role to use for fetching secrets
488
- """
489
- ...
490
-
491
- @typing.overload
492
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
493
- ...
494
-
495
- @typing.overload
496
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
497
- ...
498
-
499
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
500
- """
501
- Specifies secrets to be retrieved and injected as environment variables prior to
502
- the execution of a step.
503
-
504
-
505
- Parameters
506
- ----------
507
- sources : List[Union[str, Dict[str, Any]]], default: []
508
- List of secret specs, defining how the secrets are to be retrieved
509
- role : str, optional, default: None
510
- Role to use for fetching secrets
335
+ type : str, default 'default'
336
+ Card type.
337
+ id : str, optional, default None
338
+ If multiple cards are present, use this id to identify this card.
339
+ options : Dict[str, Any], default {}
340
+ Options passed to the card. The contents depend on the card type.
341
+ timeout : int, default 45
342
+ Interrupt reporting if it takes more than this many seconds.
511
343
  """
512
344
  ...
513
345
 
@@ -566,160 +398,66 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
566
398
  """
567
399
  ...
568
400
 
569
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
401
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
570
402
  """
571
- Specifies that this step should execute on Kubernetes.
403
+ Specifies that this step should execute on DGX cloud.
572
404
 
573
405
 
574
406
  Parameters
575
407
  ----------
576
- cpu : int, default 1
577
- Number of CPUs required for this step. If `@resources` is
578
- also present, the maximum value from all decorators is used.
579
- memory : int, default 4096
580
- Memory size (in MB) required for this step. If
581
- `@resources` is also present, the maximum value from all decorators is
582
- used.
583
- disk : int, default 10240
584
- Disk size (in MB) required for this step. If
585
- `@resources` is also present, the maximum value from all decorators is
586
- used.
587
- image : str, optional, default None
588
- Docker image to use when launching on Kubernetes. If not specified, and
589
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
590
- not, a default Docker image mapping to the current version of Python is used.
591
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
592
- If given, the imagePullPolicy to be applied to the Docker image of the step.
593
- image_pull_secrets: List[str], default []
594
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
595
- Kubernetes image pull secrets to use when pulling container images
596
- in Kubernetes.
597
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
598
- Kubernetes service account to use when launching pod in Kubernetes.
599
- secrets : List[str], optional, default None
600
- Kubernetes secrets to use when launching pod in Kubernetes. These
601
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
602
- in Metaflow configuration.
603
- node_selector: Union[Dict[str,str], str], optional, default None
604
- Kubernetes node selector(s) to apply to the pod running the task.
605
- Can be passed in as a comma separated string of values e.g.
606
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
607
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
608
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
609
- Kubernetes namespace to use when launching pod in Kubernetes.
610
- gpu : int, optional, default None
611
- Number of GPUs required for this step. A value of zero implies that
612
- the scheduled node should not have GPUs.
613
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
614
- The vendor of the GPUs to be used for this step.
615
- tolerations : List[Dict[str,str]], default []
616
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
617
- Kubernetes tolerations to use when launching pod in Kubernetes.
618
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
619
- Kubernetes labels to use when launching pod in Kubernetes.
620
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
621
- Kubernetes annotations to use when launching pod in Kubernetes.
622
- use_tmpfs : bool, default False
623
- This enables an explicit tmpfs mount for this step.
624
- tmpfs_tempdir : bool, default True
625
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
626
- tmpfs_size : int, optional, default: None
627
- The value for the size (in MiB) of the tmpfs mount for this step.
628
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
629
- memory allocated for this step.
630
- tmpfs_path : str, optional, default /metaflow_temp
631
- Path to tmpfs mount for this step.
632
- persistent_volume_claims : Dict[str, str], optional, default None
633
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
634
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
635
- shared_memory: int, optional
636
- Shared memory size (in MiB) required for this step
637
- port: int, optional
638
- Port number to specify in the Kubernetes job object
639
- compute_pool : str, optional, default None
640
- Compute pool to be used for for this step.
641
- If not specified, any accessible compute pool within the perimeter is used.
642
- hostname_resolution_timeout: int, default 10 * 60
643
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
644
- Only applicable when @parallel is used.
645
- qos: str, default: Burstable
646
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
647
-
648
- security_context: Dict[str, Any], optional, default None
649
- Container security context. Applies to the task container. Allows the following keys:
650
- - privileged: bool, optional, default None
651
- - allow_privilege_escalation: bool, optional, default None
652
- - run_as_user: int, optional, default None
653
- - run_as_group: int, optional, default None
654
- - run_as_non_root: bool, optional, default None
408
+ gpu : int
409
+ Number of GPUs to use.
410
+ gpu_type : str
411
+ Type of Nvidia GPU to use.
412
+ queue_timeout : int
413
+ Time to keep the job in NVCF's queue.
655
414
  """
656
415
  ...
657
416
 
658
417
  @typing.overload
659
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
418
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
660
419
  """
661
- Decorator prototype for all step decorators. This function gets specialized
662
- and imported for all decorators types by _import_plugin_decorators().
420
+ Specifies environment variables to be set prior to the execution of a step.
421
+
422
+
423
+ Parameters
424
+ ----------
425
+ vars : Dict[str, str], default {}
426
+ Dictionary of environment variables to set.
663
427
  """
664
428
  ...
665
429
 
666
430
  @typing.overload
667
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
431
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
668
432
  ...
669
433
 
670
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
671
- """
672
- Decorator prototype for all step decorators. This function gets specialized
673
- and imported for all decorators types by _import_plugin_decorators().
674
- """
434
+ @typing.overload
435
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
675
436
  ...
676
437
 
677
- @typing.overload
678
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
438
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
679
439
  """
680
- Creates a human-readable report, a Metaflow Card, after this step completes.
681
-
682
- Note that you may add multiple `@card` decorators in a step with different parameters.
440
+ Specifies environment variables to be set prior to the execution of a step.
683
441
 
684
442
 
685
443
  Parameters
686
444
  ----------
687
- type : str, default 'default'
688
- Card type.
689
- id : str, optional, default None
690
- If multiple cards are present, use this id to identify this card.
691
- options : Dict[str, Any], default {}
692
- Options passed to the card. The contents depend on the card type.
693
- timeout : int, default 45
694
- Interrupt reporting if it takes more than this many seconds.
445
+ vars : Dict[str, str], default {}
446
+ Dictionary of environment variables to set.
695
447
  """
696
448
  ...
697
449
 
698
- @typing.overload
699
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
700
- ...
701
-
702
- @typing.overload
703
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
704
- ...
705
-
706
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
450
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
707
451
  """
708
- Creates a human-readable report, a Metaflow Card, after this step completes.
709
-
710
- Note that you may add multiple `@card` decorators in a step with different parameters.
452
+ Specifies that this step should execute on DGX cloud.
711
453
 
712
454
 
713
455
  Parameters
714
456
  ----------
715
- type : str, default 'default'
716
- Card type.
717
- id : str, optional, default None
718
- If multiple cards are present, use this id to identify this card.
719
- options : Dict[str, Any], default {}
720
- Options passed to the card. The contents depend on the card type.
721
- timeout : int, default 45
722
- Interrupt reporting if it takes more than this many seconds.
457
+ gpu : int
458
+ Number of GPUs to use.
459
+ gpu_type : str
460
+ Type of Nvidia GPU to use.
723
461
  """
724
462
  ...
725
463
 
@@ -802,128 +540,172 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
802
540
  """
803
541
  ...
804
542
 
805
- @typing.overload
806
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
807
- """
808
- A simple decorator that demonstrates using CardDecoratorInjector
809
- to inject a card and render simple markdown content.
810
- """
811
- ...
812
-
813
- @typing.overload
814
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
815
- ...
816
-
817
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
818
- """
819
- A simple decorator that demonstrates using CardDecoratorInjector
820
- to inject a card and render simple markdown content.
821
- """
822
- ...
823
-
824
- @typing.overload
825
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
543
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
826
544
  """
827
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
828
- It exists to make it easier for users to know that this decorator should only be used with
829
- a Neo Cloud like CoreWeave.
545
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
546
+
547
+
548
+ Parameters
549
+ ----------
550
+ integration_name : str, optional
551
+ Name of the S3 proxy integration. If not specified, will use the only
552
+ available S3 proxy integration in the namespace (fails if multiple exist).
553
+ write_mode : str, optional
554
+ The desired behavior during write operations to target (origin) S3 bucket.
555
+ allowed options are:
556
+ "origin-and-cache" -> write to both the target S3 bucket and local object
557
+ storage
558
+ "origin" -> only write to the target S3 bucket
559
+ "cache" -> only write to the object storage service used for caching
560
+ debug : bool, optional
561
+ Enable debug logging for proxy operations.
830
562
  """
831
563
  ...
832
564
 
833
565
  @typing.overload
834
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
835
- ...
836
-
837
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
566
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
838
567
  """
839
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
840
- It exists to make it easier for users to know that this decorator should only be used with
841
- a Neo Cloud like CoreWeave.
842
- """
843
- ...
844
-
845
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
846
- """
847
- This decorator is used to run vllm APIs as Metaflow task sidecars.
568
+ Enables checkpointing for a step.
848
569
 
849
- User code call
850
- --------------
851
- @vllm(
852
- model="...",
853
- ...
854
- )
570
+ > Examples
855
571
 
856
- Valid backend options
857
- ---------------------
858
- - 'local': Run as a separate process on the local task machine.
572
+ - Saving Checkpoints
859
573
 
860
- Valid model options
861
- -------------------
862
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
574
+ ```python
575
+ @checkpoint
576
+ @step
577
+ def train(self):
578
+ model = create_model(self.parameters, checkpoint_path = None)
579
+ for i in range(self.epochs):
580
+ # some training logic
581
+ loss = model.train(self.dataset)
582
+ if i % 10 == 0:
583
+ model.save(
584
+ current.checkpoint.directory,
585
+ )
586
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
587
+ # and returns a reference dictionary to the checkpoint saved in the datastore
588
+ self.latest_checkpoint = current.checkpoint.save(
589
+ name="epoch_checkpoint",
590
+ metadata={
591
+ "epoch": i,
592
+ "loss": loss,
593
+ }
594
+ )
595
+ ```
863
596
 
864
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
865
- If you need multiple models, you must create multiple @vllm decorators.
597
+ - Using Loaded Checkpoints
598
+
599
+ ```python
600
+ @retry(times=3)
601
+ @checkpoint
602
+ @step
603
+ def train(self):
604
+ # Assume that the task has restarted and the previous attempt of the task
605
+ # saved a checkpoint
606
+ checkpoint_path = None
607
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
608
+ print("Loaded checkpoint from the previous attempt")
609
+ checkpoint_path = current.checkpoint.directory
610
+
611
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
612
+ for i in range(self.epochs):
613
+ ...
614
+ ```
866
615
 
867
616
 
868
617
  Parameters
869
618
  ----------
870
- model: str
871
- HuggingFace model identifier to be served by vLLM.
872
- backend: str
873
- Determines where and how to run the vLLM process.
874
- openai_api_server: bool
875
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
876
- Default is False (uses native engine).
877
- Set to True for backward compatibility with existing code.
878
- debug: bool
879
- Whether to turn on verbose debugging logs.
880
- card_refresh_interval: int
881
- Interval in seconds for refreshing the vLLM status card.
882
- Only used when openai_api_server=True.
883
- max_retries: int
884
- Maximum number of retries checking for vLLM server startup.
885
- Only used when openai_api_server=True.
886
- retry_alert_frequency: int
887
- Frequency of alert logs for vLLM server startup retries.
888
- Only used when openai_api_server=True.
889
- engine_args : dict
890
- Additional keyword arguments to pass to the vLLM engine.
891
- For example, `tensor_parallel_size=2`.
619
+ load_policy : str, default: "fresh"
620
+ The policy for loading the checkpoint. The following policies are supported:
621
+ - "eager": Loads the the latest available checkpoint within the namespace.
622
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
623
+ will be loaded at the start of the task.
624
+ - "none": Do not load any checkpoint
625
+ - "fresh": Loads the lastest checkpoint created within the running Task.
626
+ This mode helps loading checkpoints across various retry attempts of the same task.
627
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
628
+ created within the task will be loaded when the task is retries execution on failure.
629
+
630
+ temp_dir_root : str, default: None
631
+ The root directory under which `current.checkpoint.directory` will be created.
892
632
  """
893
633
  ...
894
634
 
895
635
  @typing.overload
896
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
897
- """
898
- Decorator prototype for all step decorators. This function gets specialized
899
- and imported for all decorators types by _import_plugin_decorators().
900
- """
636
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
901
637
  ...
902
638
 
903
639
  @typing.overload
904
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
905
- ...
906
-
907
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
908
- """
909
- Decorator prototype for all step decorators. This function gets specialized
910
- and imported for all decorators types by _import_plugin_decorators().
911
- """
640
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
912
641
  ...
913
642
 
914
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
643
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
915
644
  """
916
- Specifies that this step should execute on DGX cloud.
645
+ Enables checkpointing for a step.
646
+
647
+ > Examples
648
+
649
+ - Saving Checkpoints
650
+
651
+ ```python
652
+ @checkpoint
653
+ @step
654
+ def train(self):
655
+ model = create_model(self.parameters, checkpoint_path = None)
656
+ for i in range(self.epochs):
657
+ # some training logic
658
+ loss = model.train(self.dataset)
659
+ if i % 10 == 0:
660
+ model.save(
661
+ current.checkpoint.directory,
662
+ )
663
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
664
+ # and returns a reference dictionary to the checkpoint saved in the datastore
665
+ self.latest_checkpoint = current.checkpoint.save(
666
+ name="epoch_checkpoint",
667
+ metadata={
668
+ "epoch": i,
669
+ "loss": loss,
670
+ }
671
+ )
672
+ ```
673
+
674
+ - Using Loaded Checkpoints
675
+
676
+ ```python
677
+ @retry(times=3)
678
+ @checkpoint
679
+ @step
680
+ def train(self):
681
+ # Assume that the task has restarted and the previous attempt of the task
682
+ # saved a checkpoint
683
+ checkpoint_path = None
684
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
685
+ print("Loaded checkpoint from the previous attempt")
686
+ checkpoint_path = current.checkpoint.directory
687
+
688
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
689
+ for i in range(self.epochs):
690
+ ...
691
+ ```
917
692
 
918
693
 
919
694
  Parameters
920
695
  ----------
921
- gpu : int
922
- Number of GPUs to use.
923
- gpu_type : str
924
- Type of Nvidia GPU to use.
925
- queue_timeout : int
926
- Time to keep the job in NVCF's queue.
696
+ load_policy : str, default: "fresh"
697
+ The policy for loading the checkpoint. The following policies are supported:
698
+ - "eager": Loads the the latest available checkpoint within the namespace.
699
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
700
+ will be loaded at the start of the task.
701
+ - "none": Do not load any checkpoint
702
+ - "fresh": Loads the lastest checkpoint created within the running Task.
703
+ This mode helps loading checkpoints across various retry attempts of the same task.
704
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
705
+ created within the task will be loaded when the task is retries execution on failure.
706
+
707
+ temp_dir_root : str, default: None
708
+ The root directory under which `current.checkpoint.directory` will be created.
927
709
  """
928
710
  ...
929
711
 
@@ -986,380 +768,460 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
986
768
  """
987
769
  ...
988
770
 
989
- @typing.overload
990
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
771
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
991
772
  """
992
- Enables loading / saving of models within a step.
773
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
993
774
 
994
- > Examples
995
- - Saving Models
996
- ```python
997
- @model
998
- @step
999
- def train(self):
1000
- # current.model.save returns a dictionary reference to the model saved
1001
- self.my_model = current.model.save(
1002
- path_to_my_model,
1003
- label="my_model",
1004
- metadata={
1005
- "epochs": 10,
1006
- "batch-size": 32,
1007
- "learning-rate": 0.001,
1008
- }
1009
- )
1010
- self.next(self.test)
775
+ User code call
776
+ --------------
777
+ @ollama(
778
+ models=[...],
779
+ ...
780
+ )
1011
781
 
1012
- @model(load="my_model")
1013
- @step
1014
- def test(self):
1015
- # `current.model.loaded` returns a dictionary of the loaded models
1016
- # where the key is the name of the artifact and the value is the path to the model
1017
- print(os.listdir(current.model.loaded["my_model"]))
1018
- self.next(self.end)
1019
- ```
782
+ Valid backend options
783
+ ---------------------
784
+ - 'local': Run as a separate process on the local task machine.
785
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
786
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1020
787
 
1021
- - Loading models
1022
- ```python
1023
- @step
1024
- def train(self):
1025
- # current.model.load returns the path to the model loaded
1026
- checkpoint_path = current.model.load(
1027
- self.checkpoint_key,
1028
- )
1029
- model_path = current.model.load(
1030
- self.model,
1031
- )
1032
- self.next(self.test)
1033
- ```
788
+ Valid model options
789
+ -------------------
790
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1034
791
 
1035
792
 
1036
793
  Parameters
1037
794
  ----------
1038
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1039
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1040
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1041
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1042
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1043
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1044
-
1045
- temp_dir_root : str, default: None
1046
- The root directory under which `current.model.loaded` will store loaded models
1047
- """
1048
- ...
1049
-
1050
- @typing.overload
1051
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1052
- ...
795
+ models: list[str]
796
+ List of Ollama containers running models in sidecars.
797
+ backend: str
798
+ Determines where and how to run the Ollama process.
799
+ force_pull: bool
800
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
801
+ cache_update_policy: str
802
+ Cache update policy: "auto", "force", or "never".
803
+ force_cache_update: bool
804
+ Simple override for "force" cache update policy.
805
+ debug: bool
806
+ Whether to turn on verbose debugging logs.
807
+ circuit_breaker_config: dict
808
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
809
+ timeout_config: dict
810
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
811
+ """
812
+ ...
1053
813
 
1054
814
  @typing.overload
1055
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
815
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
816
+ """
817
+ Decorator prototype for all step decorators. This function gets specialized
818
+ and imported for all decorators types by _import_plugin_decorators().
819
+ """
1056
820
  ...
1057
821
 
1058
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
822
+ @typing.overload
823
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
824
+ ...
825
+
826
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1059
827
  """
1060
- Enables loading / saving of models within a step.
828
+ Decorator prototype for all step decorators. This function gets specialized
829
+ and imported for all decorators types by _import_plugin_decorators().
830
+ """
831
+ ...
832
+
833
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
834
+ """
835
+ Specifies that this step should execute on Kubernetes.
836
+
837
+
838
+ Parameters
839
+ ----------
840
+ cpu : int, default 1
841
+ Number of CPUs required for this step. If `@resources` is
842
+ also present, the maximum value from all decorators is used.
843
+ memory : int, default 4096
844
+ Memory size (in MB) required for this step. If
845
+ `@resources` is also present, the maximum value from all decorators is
846
+ used.
847
+ disk : int, default 10240
848
+ Disk size (in MB) required for this step. If
849
+ `@resources` is also present, the maximum value from all decorators is
850
+ used.
851
+ image : str, optional, default None
852
+ Docker image to use when launching on Kubernetes. If not specified, and
853
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
854
+ not, a default Docker image mapping to the current version of Python is used.
855
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
856
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
857
+ image_pull_secrets: List[str], default []
858
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
859
+ Kubernetes image pull secrets to use when pulling container images
860
+ in Kubernetes.
861
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
862
+ Kubernetes service account to use when launching pod in Kubernetes.
863
+ secrets : List[str], optional, default None
864
+ Kubernetes secrets to use when launching pod in Kubernetes. These
865
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
866
+ in Metaflow configuration.
867
+ node_selector: Union[Dict[str,str], str], optional, default None
868
+ Kubernetes node selector(s) to apply to the pod running the task.
869
+ Can be passed in as a comma separated string of values e.g.
870
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
871
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
872
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
873
+ Kubernetes namespace to use when launching pod in Kubernetes.
874
+ gpu : int, optional, default None
875
+ Number of GPUs required for this step. A value of zero implies that
876
+ the scheduled node should not have GPUs.
877
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
878
+ The vendor of the GPUs to be used for this step.
879
+ tolerations : List[Dict[str,str]], default []
880
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
881
+ Kubernetes tolerations to use when launching pod in Kubernetes.
882
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
883
+ Kubernetes labels to use when launching pod in Kubernetes.
884
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
885
+ Kubernetes annotations to use when launching pod in Kubernetes.
886
+ use_tmpfs : bool, default False
887
+ This enables an explicit tmpfs mount for this step.
888
+ tmpfs_tempdir : bool, default True
889
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
890
+ tmpfs_size : int, optional, default: None
891
+ The value for the size (in MiB) of the tmpfs mount for this step.
892
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
893
+ memory allocated for this step.
894
+ tmpfs_path : str, optional, default /metaflow_temp
895
+ Path to tmpfs mount for this step.
896
+ persistent_volume_claims : Dict[str, str], optional, default None
897
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
898
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
899
+ shared_memory: int, optional
900
+ Shared memory size (in MiB) required for this step
901
+ port: int, optional
902
+ Port number to specify in the Kubernetes job object
903
+ compute_pool : str, optional, default None
904
+ Compute pool to be used for for this step.
905
+ If not specified, any accessible compute pool within the perimeter is used.
906
+ hostname_resolution_timeout: int, default 10 * 60
907
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
908
+ Only applicable when @parallel is used.
909
+ qos: str, default: Burstable
910
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
911
+
912
+ security_context: Dict[str, Any], optional, default None
913
+ Container security context. Applies to the task container. Allows the following keys:
914
+ - privileged: bool, optional, default None
915
+ - allow_privilege_escalation: bool, optional, default None
916
+ - run_as_user: int, optional, default None
917
+ - run_as_group: int, optional, default None
918
+ - run_as_non_root: bool, optional, default None
919
+ """
920
+ ...
921
+
922
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
923
+ """
924
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
1061
925
 
1062
926
  > Examples
1063
- - Saving Models
927
+
928
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
1064
929
  ```python
1065
- @model
1066
- @step
1067
- def train(self):
1068
- # current.model.save returns a dictionary reference to the model saved
1069
- self.my_model = current.model.save(
1070
- path_to_my_model,
1071
- label="my_model",
1072
- metadata={
1073
- "epochs": 10,
1074
- "batch-size": 32,
1075
- "learning-rate": 0.001,
1076
- }
1077
- )
1078
- self.next(self.test)
930
+ @huggingface_hub
931
+ @step
932
+ def pull_model_from_huggingface(self):
933
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
934
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
935
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
936
+ # value of the function is a reference to the model in the backend storage.
937
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1079
938
 
1080
- @model(load="my_model")
1081
- @step
1082
- def test(self):
1083
- # `current.model.loaded` returns a dictionary of the loaded models
1084
- # where the key is the name of the artifact and the value is the path to the model
1085
- print(os.listdir(current.model.loaded["my_model"]))
1086
- self.next(self.end)
939
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
940
+ self.llama_model = current.huggingface_hub.snapshot_download(
941
+ repo_id=self.model_id,
942
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
943
+ )
944
+ self.next(self.train)
1087
945
  ```
1088
946
 
1089
- - Loading models
947
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
1090
948
  ```python
1091
- @step
1092
- def train(self):
1093
- # current.model.load returns the path to the model loaded
1094
- checkpoint_path = current.model.load(
1095
- self.checkpoint_key,
1096
- )
1097
- model_path = current.model.load(
1098
- self.model,
1099
- )
1100
- self.next(self.test)
949
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
950
+ @step
951
+ def pull_model_from_huggingface(self):
952
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
953
+ ```
954
+
955
+ ```python
956
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
957
+ @step
958
+ def finetune_model(self):
959
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
960
+ # path_to_model will be /my-directory
961
+ ```
962
+
963
+ ```python
964
+ # Takes all the arguments passed to `snapshot_download`
965
+ # except for `local_dir`
966
+ @huggingface_hub(load=[
967
+ {
968
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
969
+ },
970
+ {
971
+ "repo_id": "myorg/mistral-lora",
972
+ "repo_type": "model",
973
+ },
974
+ ])
975
+ @step
976
+ def finetune_model(self):
977
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
978
+ # path_to_model will be /my-directory
1101
979
  ```
1102
980
 
1103
981
 
1104
982
  Parameters
1105
983
  ----------
1106
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1107
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1108
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1109
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1110
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1111
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
984
+ temp_dir_root : str, optional
985
+ The root directory that will hold the temporary directory where objects will be downloaded.
1112
986
 
1113
- temp_dir_root : str, default: None
1114
- The root directory under which `current.model.loaded` will store loaded models
987
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
988
+ The list of repos (models/datasets) to load.
989
+
990
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
991
+
992
+ - If repo (model/dataset) is not found in the datastore:
993
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
994
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
995
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
996
+
997
+ - If repo is found in the datastore:
998
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
1115
999
  """
1116
1000
  ...
1117
1001
 
1118
1002
  @typing.overload
1119
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1003
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1120
1004
  """
1121
- Specifies that the step will success under all circumstances.
1122
-
1123
- The decorator will create an optional artifact, specified by `var`, which
1124
- contains the exception raised. You can use it to detect the presence
1125
- of errors, indicating that all happy-path artifacts produced by the step
1126
- are missing.
1005
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1006
+ It exists to make it easier for users to know that this decorator should only be used with
1007
+ a Neo Cloud like CoreWeave.
1008
+ """
1009
+ ...
1010
+
1011
+ @typing.overload
1012
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1013
+ ...
1014
+
1015
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1016
+ """
1017
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1018
+ It exists to make it easier for users to know that this decorator should only be used with
1019
+ a Neo Cloud like CoreWeave.
1020
+ """
1021
+ ...
1022
+
1023
+ @typing.overload
1024
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1025
+ """
1026
+ Specifies secrets to be retrieved and injected as environment variables prior to
1027
+ the execution of a step.
1127
1028
 
1128
1029
 
1129
1030
  Parameters
1130
1031
  ----------
1131
- var : str, optional, default None
1132
- Name of the artifact in which to store the caught exception.
1133
- If not specified, the exception is not stored.
1134
- print_exception : bool, default True
1135
- Determines whether or not the exception is printed to
1136
- stdout when caught.
1032
+ sources : List[Union[str, Dict[str, Any]]], default: []
1033
+ List of secret specs, defining how the secrets are to be retrieved
1034
+ role : str, optional, default: None
1035
+ Role to use for fetching secrets
1137
1036
  """
1138
1037
  ...
1139
1038
 
1140
1039
  @typing.overload
1141
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1040
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1142
1041
  ...
1143
1042
 
1144
1043
  @typing.overload
1145
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1044
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1146
1045
  ...
1147
1046
 
1148
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1047
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1149
1048
  """
1150
- Specifies that the step will success under all circumstances.
1151
-
1152
- The decorator will create an optional artifact, specified by `var`, which
1153
- contains the exception raised. You can use it to detect the presence
1154
- of errors, indicating that all happy-path artifacts produced by the step
1155
- are missing.
1049
+ Specifies secrets to be retrieved and injected as environment variables prior to
1050
+ the execution of a step.
1156
1051
 
1157
1052
 
1158
1053
  Parameters
1159
1054
  ----------
1160
- var : str, optional, default None
1161
- Name of the artifact in which to store the caught exception.
1162
- If not specified, the exception is not stored.
1163
- print_exception : bool, default True
1164
- Determines whether or not the exception is printed to
1165
- stdout when caught.
1055
+ sources : List[Union[str, Dict[str, Any]]], default: []
1056
+ List of secret specs, defining how the secrets are to be retrieved
1057
+ role : str, optional, default: None
1058
+ Role to use for fetching secrets
1166
1059
  """
1167
1060
  ...
1168
1061
 
1169
1062
  @typing.overload
1170
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1063
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1171
1064
  """
1172
- Enables checkpointing for a step.
1065
+ Enables loading / saving of models within a step.
1173
1066
 
1174
1067
  > Examples
1175
-
1176
- - Saving Checkpoints
1177
-
1068
+ - Saving Models
1178
1069
  ```python
1179
- @checkpoint
1070
+ @model
1180
1071
  @step
1181
1072
  def train(self):
1182
- model = create_model(self.parameters, checkpoint_path = None)
1183
- for i in range(self.epochs):
1184
- # some training logic
1185
- loss = model.train(self.dataset)
1186
- if i % 10 == 0:
1187
- model.save(
1188
- current.checkpoint.directory,
1189
- )
1190
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1191
- # and returns a reference dictionary to the checkpoint saved in the datastore
1192
- self.latest_checkpoint = current.checkpoint.save(
1193
- name="epoch_checkpoint",
1194
- metadata={
1195
- "epoch": i,
1196
- "loss": loss,
1197
- }
1198
- )
1199
- ```
1073
+ # current.model.save returns a dictionary reference to the model saved
1074
+ self.my_model = current.model.save(
1075
+ path_to_my_model,
1076
+ label="my_model",
1077
+ metadata={
1078
+ "epochs": 10,
1079
+ "batch-size": 32,
1080
+ "learning-rate": 0.001,
1081
+ }
1082
+ )
1083
+ self.next(self.test)
1200
1084
 
1201
- - Using Loaded Checkpoints
1085
+ @model(load="my_model")
1086
+ @step
1087
+ def test(self):
1088
+ # `current.model.loaded` returns a dictionary of the loaded models
1089
+ # where the key is the name of the artifact and the value is the path to the model
1090
+ print(os.listdir(current.model.loaded["my_model"]))
1091
+ self.next(self.end)
1092
+ ```
1202
1093
 
1094
+ - Loading models
1203
1095
  ```python
1204
- @retry(times=3)
1205
- @checkpoint
1206
1096
  @step
1207
1097
  def train(self):
1208
- # Assume that the task has restarted and the previous attempt of the task
1209
- # saved a checkpoint
1210
- checkpoint_path = None
1211
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1212
- print("Loaded checkpoint from the previous attempt")
1213
- checkpoint_path = current.checkpoint.directory
1214
-
1215
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1216
- for i in range(self.epochs):
1217
- ...
1098
+ # current.model.load returns the path to the model loaded
1099
+ checkpoint_path = current.model.load(
1100
+ self.checkpoint_key,
1101
+ )
1102
+ model_path = current.model.load(
1103
+ self.model,
1104
+ )
1105
+ self.next(self.test)
1218
1106
  ```
1219
1107
 
1220
1108
 
1221
1109
  Parameters
1222
1110
  ----------
1223
- load_policy : str, default: "fresh"
1224
- The policy for loading the checkpoint. The following policies are supported:
1225
- - "eager": Loads the the latest available checkpoint within the namespace.
1226
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1227
- will be loaded at the start of the task.
1228
- - "none": Do not load any checkpoint
1229
- - "fresh": Loads the lastest checkpoint created within the running Task.
1230
- This mode helps loading checkpoints across various retry attempts of the same task.
1231
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1232
- created within the task will be loaded when the task is retries execution on failure.
1111
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1112
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1113
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1114
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1115
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1116
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1233
1117
 
1234
1118
  temp_dir_root : str, default: None
1235
- The root directory under which `current.checkpoint.directory` will be created.
1119
+ The root directory under which `current.model.loaded` will store loaded models
1236
1120
  """
1237
1121
  ...
1238
1122
 
1239
1123
  @typing.overload
1240
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1124
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1241
1125
  ...
1242
1126
 
1243
1127
  @typing.overload
1244
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1128
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1245
1129
  ...
1246
1130
 
1247
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1131
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1248
1132
  """
1249
- Enables checkpointing for a step.
1250
-
1251
- > Examples
1252
-
1253
- - Saving Checkpoints
1133
+ Enables loading / saving of models within a step.
1254
1134
 
1135
+ > Examples
1136
+ - Saving Models
1255
1137
  ```python
1256
- @checkpoint
1138
+ @model
1257
1139
  @step
1258
1140
  def train(self):
1259
- model = create_model(self.parameters, checkpoint_path = None)
1260
- for i in range(self.epochs):
1261
- # some training logic
1262
- loss = model.train(self.dataset)
1263
- if i % 10 == 0:
1264
- model.save(
1265
- current.checkpoint.directory,
1266
- )
1267
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1268
- # and returns a reference dictionary to the checkpoint saved in the datastore
1269
- self.latest_checkpoint = current.checkpoint.save(
1270
- name="epoch_checkpoint",
1271
- metadata={
1272
- "epoch": i,
1273
- "loss": loss,
1274
- }
1275
- )
1276
- ```
1141
+ # current.model.save returns a dictionary reference to the model saved
1142
+ self.my_model = current.model.save(
1143
+ path_to_my_model,
1144
+ label="my_model",
1145
+ metadata={
1146
+ "epochs": 10,
1147
+ "batch-size": 32,
1148
+ "learning-rate": 0.001,
1149
+ }
1150
+ )
1151
+ self.next(self.test)
1277
1152
 
1278
- - Using Loaded Checkpoints
1153
+ @model(load="my_model")
1154
+ @step
1155
+ def test(self):
1156
+ # `current.model.loaded` returns a dictionary of the loaded models
1157
+ # where the key is the name of the artifact and the value is the path to the model
1158
+ print(os.listdir(current.model.loaded["my_model"]))
1159
+ self.next(self.end)
1160
+ ```
1279
1161
 
1162
+ - Loading models
1280
1163
  ```python
1281
- @retry(times=3)
1282
- @checkpoint
1283
1164
  @step
1284
1165
  def train(self):
1285
- # Assume that the task has restarted and the previous attempt of the task
1286
- # saved a checkpoint
1287
- checkpoint_path = None
1288
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1289
- print("Loaded checkpoint from the previous attempt")
1290
- checkpoint_path = current.checkpoint.directory
1291
-
1292
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1293
- for i in range(self.epochs):
1294
- ...
1166
+ # current.model.load returns the path to the model loaded
1167
+ checkpoint_path = current.model.load(
1168
+ self.checkpoint_key,
1169
+ )
1170
+ model_path = current.model.load(
1171
+ self.model,
1172
+ )
1173
+ self.next(self.test)
1295
1174
  ```
1296
1175
 
1297
1176
 
1298
1177
  Parameters
1299
1178
  ----------
1300
- load_policy : str, default: "fresh"
1301
- The policy for loading the checkpoint. The following policies are supported:
1302
- - "eager": Loads the the latest available checkpoint within the namespace.
1303
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1304
- will be loaded at the start of the task.
1305
- - "none": Do not load any checkpoint
1306
- - "fresh": Loads the lastest checkpoint created within the running Task.
1307
- This mode helps loading checkpoints across various retry attempts of the same task.
1308
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1309
- created within the task will be loaded when the task is retries execution on failure.
1179
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1180
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1181
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1182
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1183
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1184
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1310
1185
 
1311
1186
  temp_dir_root : str, default: None
1312
- The root directory under which `current.checkpoint.directory` will be created.
1187
+ The root directory under which `current.model.loaded` will store loaded models
1313
1188
  """
1314
1189
  ...
1315
1190
 
1316
1191
  @typing.overload
1317
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1192
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1318
1193
  """
1319
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1320
- It exists to make it easier for users to know that this decorator should only be used with
1321
- a Neo Cloud like Nebius.
1194
+ Decorator prototype for all step decorators. This function gets specialized
1195
+ and imported for all decorators types by _import_plugin_decorators().
1322
1196
  """
1323
1197
  ...
1324
1198
 
1325
1199
  @typing.overload
1326
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1327
- ...
1328
-
1329
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1330
- """
1331
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1332
- It exists to make it easier for users to know that this decorator should only be used with
1333
- a Neo Cloud like Nebius.
1334
- """
1200
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1335
1201
  ...
1336
1202
 
1337
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1203
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1338
1204
  """
1339
- Specifies that this step should execute on DGX cloud.
1340
-
1341
-
1342
- Parameters
1343
- ----------
1344
- gpu : int
1345
- Number of GPUs to use.
1346
- gpu_type : str
1347
- Type of Nvidia GPU to use.
1205
+ Decorator prototype for all step decorators. This function gets specialized
1206
+ and imported for all decorators types by _import_plugin_decorators().
1348
1207
  """
1349
1208
  ...
1350
1209
 
1351
1210
  @typing.overload
1352
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1211
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1353
1212
  """
1354
- Specifies the PyPI packages for all steps of the flow.
1213
+ Specifies the PyPI packages for the step.
1355
1214
 
1356
- Use `@pypi_base` to set common packages required by all
1215
+ Information in this decorator will augment any
1216
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1217
+ you can use `@pypi_base` to set packages required by all
1357
1218
  steps and use `@pypi` to specify step-specific overrides.
1358
1219
 
1220
+
1359
1221
  Parameters
1360
1222
  ----------
1361
1223
  packages : Dict[str, str], default: {}
1362
- Packages to use for this flow. The key is the name of the package
1224
+ Packages to use for this step. The key is the name of the package
1363
1225
  and the value is the version to use.
1364
1226
  python : str, optional, default: None
1365
1227
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -1368,20 +1230,27 @@ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[s
1368
1230
  ...
1369
1231
 
1370
1232
  @typing.overload
1371
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1233
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1372
1234
  ...
1373
1235
 
1374
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1236
+ @typing.overload
1237
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1238
+ ...
1239
+
1240
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1375
1241
  """
1376
- Specifies the PyPI packages for all steps of the flow.
1242
+ Specifies the PyPI packages for the step.
1377
1243
 
1378
- Use `@pypi_base` to set common packages required by all
1244
+ Information in this decorator will augment any
1245
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1246
+ you can use `@pypi_base` to set packages required by all
1379
1247
  steps and use `@pypi` to specify step-specific overrides.
1380
1248
 
1249
+
1381
1250
  Parameters
1382
1251
  ----------
1383
1252
  packages : Dict[str, str], default: {}
1384
- Packages to use for this flow. The key is the name of the package
1253
+ Packages to use for this step. The key is the name of the package
1385
1254
  and the value is the version to use.
1386
1255
  python : str, optional, default: None
1387
1256
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
@@ -1390,154 +1259,92 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1390
1259
  ...
1391
1260
 
1392
1261
  @typing.overload
1393
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1262
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1394
1263
  """
1395
- Specifies the times when the flow should be run when running on a
1396
- production scheduler.
1397
-
1398
-
1399
- Parameters
1400
- ----------
1401
- hourly : bool, default False
1402
- Run the workflow hourly.
1403
- daily : bool, default True
1404
- Run the workflow daily.
1405
- weekly : bool, default False
1406
- Run the workflow weekly.
1407
- cron : str, optional, default None
1408
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1409
- specified by this expression.
1410
- timezone : str, optional, default None
1411
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1412
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1264
+ A simple decorator that demonstrates using CardDecoratorInjector
1265
+ to inject a card and render simple markdown content.
1413
1266
  """
1414
1267
  ...
1415
1268
 
1416
1269
  @typing.overload
1417
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1270
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1418
1271
  ...
1419
1272
 
1420
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1273
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1421
1274
  """
1422
- Specifies the times when the flow should be run when running on a
1423
- production scheduler.
1424
-
1425
-
1426
- Parameters
1427
- ----------
1428
- hourly : bool, default False
1429
- Run the workflow hourly.
1430
- daily : bool, default True
1431
- Run the workflow daily.
1432
- weekly : bool, default False
1433
- Run the workflow weekly.
1434
- cron : str, optional, default None
1435
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1436
- specified by this expression.
1437
- timezone : str, optional, default None
1438
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1439
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1275
+ A simple decorator that demonstrates using CardDecoratorInjector
1276
+ to inject a card and render simple markdown content.
1440
1277
  """
1441
1278
  ...
1442
1279
 
1443
1280
  @typing.overload
1444
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1281
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1445
1282
  """
1446
- Specifies the flow(s) that this flow depends on.
1447
-
1448
- ```
1449
- @trigger_on_finish(flow='FooFlow')
1450
- ```
1451
- or
1452
- ```
1453
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1454
- ```
1455
- This decorator respects the @project decorator and triggers the flow
1456
- when upstream runs within the same namespace complete successfully
1457
-
1458
- Additionally, you can specify project aware upstream flow dependencies
1459
- by specifying the fully qualified project_flow_name.
1460
- ```
1461
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1462
- ```
1463
- or
1464
- ```
1465
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1466
- ```
1467
-
1468
- You can also specify just the project or project branch (other values will be
1469
- inferred from the current project or project branch):
1470
- ```
1471
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1472
- ```
1473
-
1474
- Note that `branch` is typically one of:
1475
- - `prod`
1476
- - `user.bob`
1477
- - `test.my_experiment`
1478
- - `prod.staging`
1479
-
1480
-
1481
- Parameters
1482
- ----------
1483
- flow : Union[str, Dict[str, str]], optional, default None
1484
- Upstream flow dependency for this flow.
1485
- flows : List[Union[str, Dict[str, str]]], default []
1486
- Upstream flow dependencies for this flow.
1487
- options : Dict[str, Any], default {}
1488
- Backend-specific configuration for tuning eventing behavior.
1283
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1284
+ It exists to make it easier for users to know that this decorator should only be used with
1285
+ a Neo Cloud like Nebius.
1489
1286
  """
1490
1287
  ...
1491
1288
 
1492
1289
  @typing.overload
1493
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1290
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1494
1291
  ...
1495
1292
 
1496
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1293
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1497
1294
  """
1498
- Specifies the flow(s) that this flow depends on.
1499
-
1500
- ```
1501
- @trigger_on_finish(flow='FooFlow')
1502
- ```
1503
- or
1504
- ```
1505
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1506
- ```
1507
- This decorator respects the @project decorator and triggers the flow
1508
- when upstream runs within the same namespace complete successfully
1295
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1296
+ It exists to make it easier for users to know that this decorator should only be used with
1297
+ a Neo Cloud like Nebius.
1298
+ """
1299
+ ...
1300
+
1301
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
+ """
1303
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1509
1304
 
1510
- Additionally, you can specify project aware upstream flow dependencies
1511
- by specifying the fully qualified project_flow_name.
1512
- ```
1513
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1514
- ```
1515
- or
1516
- ```
1517
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1518
- ```
1305
+ User code call
1306
+ --------------
1307
+ @vllm(
1308
+ model="...",
1309
+ ...
1310
+ )
1519
1311
 
1520
- You can also specify just the project or project branch (other values will be
1521
- inferred from the current project or project branch):
1522
- ```
1523
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1524
- ```
1312
+ Valid backend options
1313
+ ---------------------
1314
+ - 'local': Run as a separate process on the local task machine.
1525
1315
 
1526
- Note that `branch` is typically one of:
1527
- - `prod`
1528
- - `user.bob`
1529
- - `test.my_experiment`
1530
- - `prod.staging`
1316
+ Valid model options
1317
+ -------------------
1318
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1319
+
1320
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
+ If you need multiple models, you must create multiple @vllm decorators.
1531
1322
 
1532
1323
 
1533
1324
  Parameters
1534
1325
  ----------
1535
- flow : Union[str, Dict[str, str]], optional, default None
1536
- Upstream flow dependency for this flow.
1537
- flows : List[Union[str, Dict[str, str]]], default []
1538
- Upstream flow dependencies for this flow.
1539
- options : Dict[str, Any], default {}
1540
- Backend-specific configuration for tuning eventing behavior.
1326
+ model: str
1327
+ HuggingFace model identifier to be served by vLLM.
1328
+ backend: str
1329
+ Determines where and how to run the vLLM process.
1330
+ openai_api_server: bool
1331
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
+ Default is False (uses native engine).
1333
+ Set to True for backward compatibility with existing code.
1334
+ debug: bool
1335
+ Whether to turn on verbose debugging logs.
1336
+ card_refresh_interval: int
1337
+ Interval in seconds for refreshing the vLLM status card.
1338
+ Only used when openai_api_server=True.
1339
+ max_retries: int
1340
+ Maximum number of retries checking for vLLM server startup.
1341
+ Only used when openai_api_server=True.
1342
+ retry_alert_frequency: int
1343
+ Frequency of alert logs for vLLM server startup retries.
1344
+ Only used when openai_api_server=True.
1345
+ engine_args : dict
1346
+ Additional keyword arguments to pass to the vLLM engine.
1347
+ For example, `tensor_parallel_size=2`.
1541
1348
  """
1542
1349
  ...
1543
1350
 
@@ -1592,6 +1399,99 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1592
1399
  """
1593
1400
  ...
1594
1401
 
1402
+ @typing.overload
1403
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1404
+ """
1405
+ Specifies the event(s) that this flow depends on.
1406
+
1407
+ ```
1408
+ @trigger(event='foo')
1409
+ ```
1410
+ or
1411
+ ```
1412
+ @trigger(events=['foo', 'bar'])
1413
+ ```
1414
+
1415
+ Additionally, you can specify the parameter mappings
1416
+ to map event payload to Metaflow parameters for the flow.
1417
+ ```
1418
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1419
+ ```
1420
+ or
1421
+ ```
1422
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1423
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1424
+ ```
1425
+
1426
+ 'parameters' can also be a list of strings and tuples like so:
1427
+ ```
1428
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1429
+ ```
1430
+ This is equivalent to:
1431
+ ```
1432
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1433
+ ```
1434
+
1435
+
1436
+ Parameters
1437
+ ----------
1438
+ event : Union[str, Dict[str, Any]], optional, default None
1439
+ Event dependency for this flow.
1440
+ events : List[Union[str, Dict[str, Any]]], default []
1441
+ Events dependency for this flow.
1442
+ options : Dict[str, Any], default {}
1443
+ Backend-specific configuration for tuning eventing behavior.
1444
+ """
1445
+ ...
1446
+
1447
+ @typing.overload
1448
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1449
+ ...
1450
+
1451
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1452
+ """
1453
+ Specifies the event(s) that this flow depends on.
1454
+
1455
+ ```
1456
+ @trigger(event='foo')
1457
+ ```
1458
+ or
1459
+ ```
1460
+ @trigger(events=['foo', 'bar'])
1461
+ ```
1462
+
1463
+ Additionally, you can specify the parameter mappings
1464
+ to map event payload to Metaflow parameters for the flow.
1465
+ ```
1466
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1467
+ ```
1468
+ or
1469
+ ```
1470
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1471
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1472
+ ```
1473
+
1474
+ 'parameters' can also be a list of strings and tuples like so:
1475
+ ```
1476
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1477
+ ```
1478
+ This is equivalent to:
1479
+ ```
1480
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1481
+ ```
1482
+
1483
+
1484
+ Parameters
1485
+ ----------
1486
+ event : Union[str, Dict[str, Any]], optional, default None
1487
+ Event dependency for this flow.
1488
+ events : List[Union[str, Dict[str, Any]]], default []
1489
+ Events dependency for this flow.
1490
+ options : Dict[str, Any], default {}
1491
+ Backend-specific configuration for tuning eventing behavior.
1492
+ """
1493
+ ...
1494
+
1595
1495
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1596
1496
  """
1597
1497
  Specifies what flows belong to the same project.
@@ -1627,49 +1527,6 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1627
1527
  """
1628
1528
  ...
1629
1529
 
1630
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1631
- """
1632
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1633
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1634
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1635
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1636
- starts only after all sensors finish.
1637
-
1638
-
1639
- Parameters
1640
- ----------
1641
- timeout : int
1642
- Time, in seconds before the task times out and fails. (Default: 3600)
1643
- poke_interval : int
1644
- Time in seconds that the job should wait in between each try. (Default: 60)
1645
- mode : str
1646
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1647
- exponential_backoff : bool
1648
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1649
- pool : str
1650
- the slot pool this task should run in,
1651
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1652
- soft_fail : bool
1653
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1654
- name : str
1655
- Name of the sensor on Airflow
1656
- description : str
1657
- Description of sensor in the Airflow UI
1658
- bucket_key : Union[str, List[str]]
1659
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1660
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1661
- bucket_name : str
1662
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1663
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1664
- wildcard_match : bool
1665
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1666
- aws_conn_id : str
1667
- a reference to the s3 connection on Airflow. (Default: None)
1668
- verify : bool
1669
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1670
- """
1671
- ...
1672
-
1673
1530
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1674
1531
  """
1675
1532
  Allows setting external datastores to save data for the
@@ -1769,154 +1626,297 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1769
1626
  Parameters:
1770
1627
  ----------
1771
1628
 
1772
- type: str
1773
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1629
+ type: str
1630
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1631
+
1632
+ config: dict or Callable
1633
+ Dictionary of configuration options for the datastore. The following keys are required:
1634
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1635
+ - example: 's3://bucket-name/path/to/root'
1636
+ - example: 'gs://bucket-name/path/to/root'
1637
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1638
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1639
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1640
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1641
+ """
1642
+ ...
1643
+
1644
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1645
+ """
1646
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1647
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1648
+
1649
+
1650
+ Parameters
1651
+ ----------
1652
+ timeout : int
1653
+ Time, in seconds before the task times out and fails. (Default: 3600)
1654
+ poke_interval : int
1655
+ Time in seconds that the job should wait in between each try. (Default: 60)
1656
+ mode : str
1657
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1658
+ exponential_backoff : bool
1659
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1660
+ pool : str
1661
+ the slot pool this task should run in,
1662
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1663
+ soft_fail : bool
1664
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1665
+ name : str
1666
+ Name of the sensor on Airflow
1667
+ description : str
1668
+ Description of sensor in the Airflow UI
1669
+ external_dag_id : str
1670
+ The dag_id that contains the task you want to wait for.
1671
+ external_task_ids : List[str]
1672
+ The list of task_ids that you want to wait for.
1673
+ If None (default value) the sensor waits for the DAG. (Default: None)
1674
+ allowed_states : List[str]
1675
+ Iterable of allowed states, (Default: ['success'])
1676
+ failed_states : List[str]
1677
+ Iterable of failed or dis-allowed states. (Default: None)
1678
+ execution_delta : datetime.timedelta
1679
+ time difference with the previous execution to look at,
1680
+ the default is the same logical date as the current task or DAG. (Default: None)
1681
+ check_existence: bool
1682
+ Set to True to check if the external task exists or check if
1683
+ the DAG to wait for exists. (Default: True)
1684
+ """
1685
+ ...
1686
+
1687
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1688
+ """
1689
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1690
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1691
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1692
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1693
+ starts only after all sensors finish.
1694
+
1774
1695
 
1775
- config: dict or Callable
1776
- Dictionary of configuration options for the datastore. The following keys are required:
1777
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1778
- - example: 's3://bucket-name/path/to/root'
1779
- - example: 'gs://bucket-name/path/to/root'
1780
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1781
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1782
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1783
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1696
+ Parameters
1697
+ ----------
1698
+ timeout : int
1699
+ Time, in seconds before the task times out and fails. (Default: 3600)
1700
+ poke_interval : int
1701
+ Time in seconds that the job should wait in between each try. (Default: 60)
1702
+ mode : str
1703
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1704
+ exponential_backoff : bool
1705
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1706
+ pool : str
1707
+ the slot pool this task should run in,
1708
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1709
+ soft_fail : bool
1710
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1711
+ name : str
1712
+ Name of the sensor on Airflow
1713
+ description : str
1714
+ Description of sensor in the Airflow UI
1715
+ bucket_key : Union[str, List[str]]
1716
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1717
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1718
+ bucket_name : str
1719
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1720
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1721
+ wildcard_match : bool
1722
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1723
+ aws_conn_id : str
1724
+ a reference to the s3 connection on Airflow. (Default: None)
1725
+ verify : bool
1726
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1784
1727
  """
1785
1728
  ...
1786
1729
 
1787
1730
  @typing.overload
1788
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1731
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1789
1732
  """
1790
- Specifies the event(s) that this flow depends on.
1733
+ Specifies the flow(s) that this flow depends on.
1791
1734
 
1792
1735
  ```
1793
- @trigger(event='foo')
1736
+ @trigger_on_finish(flow='FooFlow')
1794
1737
  ```
1795
1738
  or
1796
1739
  ```
1797
- @trigger(events=['foo', 'bar'])
1740
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1798
1741
  ```
1742
+ This decorator respects the @project decorator and triggers the flow
1743
+ when upstream runs within the same namespace complete successfully
1799
1744
 
1800
- Additionally, you can specify the parameter mappings
1801
- to map event payload to Metaflow parameters for the flow.
1745
+ Additionally, you can specify project aware upstream flow dependencies
1746
+ by specifying the fully qualified project_flow_name.
1802
1747
  ```
1803
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1748
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1804
1749
  ```
1805
1750
  or
1806
1751
  ```
1807
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1808
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1752
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1809
1753
  ```
1810
1754
 
1811
- 'parameters' can also be a list of strings and tuples like so:
1812
- ```
1813
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1814
- ```
1815
- This is equivalent to:
1755
+ You can also specify just the project or project branch (other values will be
1756
+ inferred from the current project or project branch):
1816
1757
  ```
1817
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1758
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1818
1759
  ```
1819
1760
 
1761
+ Note that `branch` is typically one of:
1762
+ - `prod`
1763
+ - `user.bob`
1764
+ - `test.my_experiment`
1765
+ - `prod.staging`
1766
+
1820
1767
 
1821
1768
  Parameters
1822
1769
  ----------
1823
- event : Union[str, Dict[str, Any]], optional, default None
1824
- Event dependency for this flow.
1825
- events : List[Union[str, Dict[str, Any]]], default []
1826
- Events dependency for this flow.
1770
+ flow : Union[str, Dict[str, str]], optional, default None
1771
+ Upstream flow dependency for this flow.
1772
+ flows : List[Union[str, Dict[str, str]]], default []
1773
+ Upstream flow dependencies for this flow.
1827
1774
  options : Dict[str, Any], default {}
1828
1775
  Backend-specific configuration for tuning eventing behavior.
1829
1776
  """
1830
1777
  ...
1831
1778
 
1832
1779
  @typing.overload
1833
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1780
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1834
1781
  ...
1835
1782
 
1836
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1783
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1837
1784
  """
1838
- Specifies the event(s) that this flow depends on.
1785
+ Specifies the flow(s) that this flow depends on.
1839
1786
 
1840
1787
  ```
1841
- @trigger(event='foo')
1788
+ @trigger_on_finish(flow='FooFlow')
1842
1789
  ```
1843
1790
  or
1844
1791
  ```
1845
- @trigger(events=['foo', 'bar'])
1792
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1846
1793
  ```
1794
+ This decorator respects the @project decorator and triggers the flow
1795
+ when upstream runs within the same namespace complete successfully
1847
1796
 
1848
- Additionally, you can specify the parameter mappings
1849
- to map event payload to Metaflow parameters for the flow.
1797
+ Additionally, you can specify project aware upstream flow dependencies
1798
+ by specifying the fully qualified project_flow_name.
1850
1799
  ```
1851
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1800
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1852
1801
  ```
1853
1802
  or
1854
1803
  ```
1855
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1856
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1804
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1857
1805
  ```
1858
1806
 
1859
- 'parameters' can also be a list of strings and tuples like so:
1860
- ```
1861
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1862
- ```
1863
- This is equivalent to:
1807
+ You can also specify just the project or project branch (other values will be
1808
+ inferred from the current project or project branch):
1864
1809
  ```
1865
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1810
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1866
1811
  ```
1867
1812
 
1813
+ Note that `branch` is typically one of:
1814
+ - `prod`
1815
+ - `user.bob`
1816
+ - `test.my_experiment`
1817
+ - `prod.staging`
1818
+
1868
1819
 
1869
1820
  Parameters
1870
1821
  ----------
1871
- event : Union[str, Dict[str, Any]], optional, default None
1872
- Event dependency for this flow.
1873
- events : List[Union[str, Dict[str, Any]]], default []
1874
- Events dependency for this flow.
1822
+ flow : Union[str, Dict[str, str]], optional, default None
1823
+ Upstream flow dependency for this flow.
1824
+ flows : List[Union[str, Dict[str, str]]], default []
1825
+ Upstream flow dependencies for this flow.
1875
1826
  options : Dict[str, Any], default {}
1876
1827
  Backend-specific configuration for tuning eventing behavior.
1877
1828
  """
1878
1829
  ...
1879
1830
 
1880
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1831
+ @typing.overload
1832
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1881
1833
  """
1882
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1883
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1834
+ Specifies the PyPI packages for all steps of the flow.
1884
1835
 
1836
+ Use `@pypi_base` to set common packages required by all
1837
+ steps and use `@pypi` to specify step-specific overrides.
1885
1838
 
1886
1839
  Parameters
1887
1840
  ----------
1888
- timeout : int
1889
- Time, in seconds before the task times out and fails. (Default: 3600)
1890
- poke_interval : int
1891
- Time in seconds that the job should wait in between each try. (Default: 60)
1892
- mode : str
1893
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1894
- exponential_backoff : bool
1895
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1896
- pool : str
1897
- the slot pool this task should run in,
1898
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1899
- soft_fail : bool
1900
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1901
- name : str
1902
- Name of the sensor on Airflow
1903
- description : str
1904
- Description of sensor in the Airflow UI
1905
- external_dag_id : str
1906
- The dag_id that contains the task you want to wait for.
1907
- external_task_ids : List[str]
1908
- The list of task_ids that you want to wait for.
1909
- If None (default value) the sensor waits for the DAG. (Default: None)
1910
- allowed_states : List[str]
1911
- Iterable of allowed states, (Default: ['success'])
1912
- failed_states : List[str]
1913
- Iterable of failed or dis-allowed states. (Default: None)
1914
- execution_delta : datetime.timedelta
1915
- time difference with the previous execution to look at,
1916
- the default is the same logical date as the current task or DAG. (Default: None)
1917
- check_existence: bool
1918
- Set to True to check if the external task exists or check if
1919
- the DAG to wait for exists. (Default: True)
1841
+ packages : Dict[str, str], default: {}
1842
+ Packages to use for this flow. The key is the name of the package
1843
+ and the value is the version to use.
1844
+ python : str, optional, default: None
1845
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1846
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1847
+ """
1848
+ ...
1849
+
1850
+ @typing.overload
1851
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1852
+ ...
1853
+
1854
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1855
+ """
1856
+ Specifies the PyPI packages for all steps of the flow.
1857
+
1858
+ Use `@pypi_base` to set common packages required by all
1859
+ steps and use `@pypi` to specify step-specific overrides.
1860
+
1861
+ Parameters
1862
+ ----------
1863
+ packages : Dict[str, str], default: {}
1864
+ Packages to use for this flow. The key is the name of the package
1865
+ and the value is the version to use.
1866
+ python : str, optional, default: None
1867
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1868
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1869
+ """
1870
+ ...
1871
+
1872
+ @typing.overload
1873
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1874
+ """
1875
+ Specifies the times when the flow should be run when running on a
1876
+ production scheduler.
1877
+
1878
+
1879
+ Parameters
1880
+ ----------
1881
+ hourly : bool, default False
1882
+ Run the workflow hourly.
1883
+ daily : bool, default True
1884
+ Run the workflow daily.
1885
+ weekly : bool, default False
1886
+ Run the workflow weekly.
1887
+ cron : str, optional, default None
1888
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1889
+ specified by this expression.
1890
+ timezone : str, optional, default None
1891
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1892
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1893
+ """
1894
+ ...
1895
+
1896
+ @typing.overload
1897
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1898
+ ...
1899
+
1900
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1901
+ """
1902
+ Specifies the times when the flow should be run when running on a
1903
+ production scheduler.
1904
+
1905
+
1906
+ Parameters
1907
+ ----------
1908
+ hourly : bool, default False
1909
+ Run the workflow hourly.
1910
+ daily : bool, default True
1911
+ Run the workflow daily.
1912
+ weekly : bool, default False
1913
+ Run the workflow weekly.
1914
+ cron : str, optional, default None
1915
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1916
+ specified by this expression.
1917
+ timezone : str, optional, default None
1918
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1919
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1920
1920
  """
1921
1921
  ...
1922
1922