ob-metaflow-stubs 6.0.4.3__py2.py3-none-any.whl → 6.0.4.4__py2.py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (260)
  1. metaflow-stubs/__init__.pyi +795 -795
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +7 -7
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +57 -57
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +4 -4
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +4 -4
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +4 -4
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/multicore_utils.pyi +2 -2
  116. metaflow-stubs/ob_internal.pyi +2 -2
  117. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  118. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  119. metaflow-stubs/packaging_sys/distribution_support.pyi +6 -6
  120. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  121. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  122. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  123. metaflow-stubs/parameters.pyi +4 -4
  124. metaflow-stubs/plugins/__init__.pyi +9 -9
  125. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  126. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  133. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  136. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  139. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  140. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  141. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  143. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  147. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  149. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  156. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  157. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  160. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  161. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  162. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  163. metaflow-stubs/plugins/cards/__init__.pyi +6 -6
  164. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  168. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  172. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  176. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  177. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  178. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  179. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  183. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  185. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  186. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  187. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  188. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  191. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  193. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  195. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  199. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  201. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  206. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  207. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  208. metaflow-stubs/plugins/perimeters.pyi +2 -2
  209. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  211. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  213. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  215. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  217. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  218. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  219. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  220. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  221. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  225. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  226. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  227. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  228. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  229. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  230. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  231. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  232. metaflow-stubs/profilers/__init__.pyi +2 -2
  233. metaflow-stubs/pylint_wrapper.pyi +2 -2
  234. metaflow-stubs/runner/__init__.pyi +2 -2
  235. metaflow-stubs/runner/deployer.pyi +6 -6
  236. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  237. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  238. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  239. metaflow-stubs/runner/nbrun.pyi +2 -2
  240. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  241. metaflow-stubs/runner/utils.pyi +2 -2
  242. metaflow-stubs/system/__init__.pyi +2 -2
  243. metaflow-stubs/system/system_logger.pyi +3 -3
  244. metaflow-stubs/system/system_monitor.pyi +2 -2
  245. metaflow-stubs/tagging_util.pyi +2 -2
  246. metaflow-stubs/tuple_util.pyi +2 -2
  247. metaflow-stubs/user_configs/__init__.pyi +2 -2
  248. metaflow-stubs/user_configs/config_options.pyi +4 -4
  249. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  250. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  251. metaflow-stubs/user_decorators/common.pyi +2 -2
  252. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  253. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  254. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  255. metaflow-stubs/user_decorators/user_step_decorator.pyi +7 -7
  256. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/METADATA +1 -1
  257. ob_metaflow_stubs-6.0.4.4.dist-info/RECORD +260 -0
  258. ob_metaflow_stubs-6.0.4.3.dist-info/RECORD +0 -260
  259. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/WHEEL +0 -0
  260. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.16.1.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-15T20:32:21.659179 #
+ # MF version: 2.16.2.1+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-07-16T08:15:48.145976 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -48,9 +48,9 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -163,119 +163,226 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

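As context for the `@timeout` stub added in the hunk above — which notes that `seconds`, `minutes`, and `hours` are summed and that the decorator composes with `@retry` and `@catch` — here is a minimal illustrative sketch. The flow, step, and artifact names are placeholders and are not part of this package:

```python
from metaflow import FlowSpec, step, timeout, retry, catch


class TimeoutDemoFlow(FlowSpec):

    @catch(var="train_error")         # store the exception as an artifact if every retry fails
    @retry(times=2)                   # a timeout counts as a step failure, so the task is retried
    @timeout(minutes=30, seconds=30)  # values are added together: effective limit is 30 min 30 s
    @step
    def start(self):
        # long-running or potentially hanging work would go here
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "train_error", None):
            print("start failed after retries:", self.train_error)


if __name__ == "__main__":
    TimeoutDemoFlow()
```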
@@ -426,54 +533,68 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the PyPI packages for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

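The `@pypi` docstring in the hunk above describes step-level package overrides layered on top of the flow-level `@pypi_base` decorator. A minimal sketch of that pattern; the package names and versions below are examples only:

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(python="3.11.5", packages={"pandas": "2.2.2"})  # shared across all steps
class PypiDemoFlow(FlowSpec):

    @pypi(packages={"scikit-learn": "1.5.0"})  # step-specific addition on top of @pypi_base
    @step
    def start(self):
        import sklearn  # resolved inside the isolated step environment
        print("scikit-learn", sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```

Depending on the deployment, such a flow is typically launched with `--environment=pypi` (or `--environment=conda`) so that the per-step environments are actually resolved.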
@@ -567,535 +688,493 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
567
688
  ...
568
689
 
569
690
  @typing.overload
570
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
691
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
571
692
  """
572
- Enables loading / saving of models within a step.
573
-
574
- > Examples
575
- - Saving Models
576
- ```python
577
- @model
578
- @step
579
- def train(self):
580
- # current.model.save returns a dictionary reference to the model saved
581
- self.my_model = current.model.save(
582
- path_to_my_model,
583
- label="my_model",
584
- metadata={
585
- "epochs": 10,
586
- "batch-size": 32,
587
- "learning-rate": 0.001,
588
- }
589
- )
590
- self.next(self.test)
693
+ Decorator prototype for all step decorators. This function gets specialized
694
+ and imported for all decorators types by _import_plugin_decorators().
695
+ """
696
+ ...
697
+
698
+ @typing.overload
699
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
700
+ ...
701
+
702
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
703
+ """
704
+ Decorator prototype for all step decorators. This function gets specialized
705
+ and imported for all decorators types by _import_plugin_decorators().
706
+ """
707
+ ...
708
+
709
+ @typing.overload
710
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
711
+ """
712
+ Specifies the number of times the task corresponding
713
+ to a step needs to be retried.
591
714
 
592
- @model(load="my_model")
593
- @step
594
- def test(self):
595
- # `current.model.loaded` returns a dictionary of the loaded models
596
- # where the key is the name of the artifact and the value is the path to the model
597
- print(os.listdir(current.model.loaded["my_model"]))
598
- self.next(self.end)
599
- ```
715
+ This decorator is useful for handling transient errors, such as networking issues.
716
+ If your task contains operations that can't be retried safely, e.g. database updates,
717
+ it is advisable to annotate it with `@retry(times=0)`.
600
718
 
601
- - Loading models
602
- ```python
603
- @step
604
- def train(self):
605
- # current.model.load returns the path to the model loaded
606
- checkpoint_path = current.model.load(
607
- self.checkpoint_key,
608
- )
609
- model_path = current.model.load(
610
- self.model,
611
- )
612
- self.next(self.test)
613
- ```
719
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
720
+ decorator will execute a no-op task after all retries have been exhausted,
721
+ ensuring that the flow execution can continue.
614
722
 
615
723
 
616
724
  Parameters
617
725
  ----------
618
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
619
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
620
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
621
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
622
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
623
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
624
-
625
- temp_dir_root : str, default: None
626
- The root directory under which `current.model.loaded` will store loaded models
726
+ times : int, default 3
727
+ Number of times to retry this task.
728
+ minutes_between_retries : int, default 2
729
+ Number of minutes between retries.
627
730
  """
628
731
  ...
629
732
 
630
733
  @typing.overload
631
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
734
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
632
735
  ...
633
736
 
634
737
  @typing.overload
635
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
738
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
636
739
  ...
637
740
 
638
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
741
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
639
742
  """
640
- Enables loading / saving of models within a step.
641
-
642
- > Examples
643
- - Saving Models
644
- ```python
645
- @model
646
- @step
647
- def train(self):
648
- # current.model.save returns a dictionary reference to the model saved
649
- self.my_model = current.model.save(
650
- path_to_my_model,
651
- label="my_model",
652
- metadata={
653
- "epochs": 10,
654
- "batch-size": 32,
655
- "learning-rate": 0.001,
656
- }
657
- )
658
- self.next(self.test)
743
+ Specifies the number of times the task corresponding
744
+ to a step needs to be retried.
659
745
 
660
- @model(load="my_model")
661
- @step
662
- def test(self):
663
- # `current.model.loaded` returns a dictionary of the loaded models
664
- # where the key is the name of the artifact and the value is the path to the model
665
- print(os.listdir(current.model.loaded["my_model"]))
666
- self.next(self.end)
667
- ```
746
+ This decorator is useful for handling transient errors, such as networking issues.
747
+ If your task contains operations that can't be retried safely, e.g. database updates,
748
+ it is advisable to annotate it with `@retry(times=0)`.
668
749
 
669
- - Loading models
670
- ```python
671
- @step
672
- def train(self):
673
- # current.model.load returns the path to the model loaded
674
- checkpoint_path = current.model.load(
675
- self.checkpoint_key,
676
- )
677
- model_path = current.model.load(
678
- self.model,
679
- )
680
- self.next(self.test)
681
- ```
750
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
751
+ decorator will execute a no-op task after all retries have been exhausted,
752
+ ensuring that the flow execution can continue.
682
753
 
683
754
 
684
755
  Parameters
685
756
  ----------
686
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
687
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
688
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
689
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
690
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
691
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
692
-
693
- temp_dir_root : str, default: None
694
- The root directory under which `current.model.loaded` will store loaded models
757
+ times : int, default 3
758
+ Number of times to retry this task.
759
+ minutes_between_retries : int, default 2
760
+ Number of minutes between retries.
695
761
  """
696
762
  ...
697
763
 
698
764
  @typing.overload
699
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
765
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
700
766
  """
701
- Specifies the PyPI packages for the step.
767
+ Specifies the Conda environment for the step.
702
768
 
703
769
  Information in this decorator will augment any
704
- attributes set in the `@pyi_base` flow-level decorator. Hence,
705
- you can use `@pypi_base` to set packages required by all
706
- steps and use `@pypi` to specify step-specific overrides.
770
+ attributes set in the `@conda_base` flow-level decorator. Hence,
771
+ you can use `@conda_base` to set packages required by all
772
+ steps and use `@conda` to specify step-specific overrides.
707
773
 
708
774
 
709
775
  Parameters
710
776
  ----------
711
- packages : Dict[str, str], default: {}
777
+ packages : Dict[str, str], default {}
712
778
  Packages to use for this step. The key is the name of the package
713
779
  and the value is the version to use.
714
- python : str, optional, default: None
780
+ libraries : Dict[str, str], default {}
781
+ Supported for backward compatibility. When used with packages, packages will take precedence.
782
+ python : str, optional, default None
715
783
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
716
784
  that the version used will correspond to the version of the Python interpreter used to start the run.
785
+ disabled : bool, default False
786
+ If set to True, disables @conda.
717
787
  """
718
788
  ...
719
789
 
720
790
  @typing.overload
721
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
722
792
  ...
723
793
 
724
794
  @typing.overload
725
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
795
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
726
796
  ...
727
797
 
728
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
798
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
729
799
  """
730
- Specifies the PyPI packages for the step.
800
+ Specifies the Conda environment for the step.
731
801
 
732
802
  Information in this decorator will augment any
733
- attributes set in the `@pyi_base` flow-level decorator. Hence,
734
- you can use `@pypi_base` to set packages required by all
735
- steps and use `@pypi` to specify step-specific overrides.
803
+ attributes set in the `@conda_base` flow-level decorator. Hence,
804
+ you can use `@conda_base` to set packages required by all
805
+ steps and use `@conda` to specify step-specific overrides.
736
806
 
737
807
 
738
808
  Parameters
739
809
  ----------
740
- packages : Dict[str, str], default: {}
810
+ packages : Dict[str, str], default {}
741
811
  Packages to use for this step. The key is the name of the package
742
812
  and the value is the version to use.
743
- python : str, optional, default: None
813
+ libraries : Dict[str, str], default {}
814
+ Supported for backward compatibility. When used with packages, packages will take precedence.
815
+ python : str, optional, default None
744
816
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
745
817
  that the version used will correspond to the version of the Python interpreter used to start the run.
818
+ disabled : bool, default False
819
+ If set to True, disables @conda.
746
820
  """
747
821
  ...
748
822
 
749
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
823
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
750
824
  """
751
- Decorator that helps cache, version and store models/datasets from huggingface hub.
752
-
753
- > Examples
825
+ Specifies that this step should execute on DGX cloud.
754
826
 
755
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
756
- ```python
757
- @huggingface_hub
758
- @step
759
- def pull_model_from_huggingface(self):
760
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
761
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
762
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
763
- # value of the function is a reference to the model in the backend storage.
764
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
765
827
 
766
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
767
- self.llama_model = current.huggingface_hub.snapshot_download(
768
- repo_id=self.model_id,
769
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
770
- )
771
- self.next(self.train)
772
- ```
828
+ Parameters
829
+ ----------
830
+ gpu : int
831
+ Number of GPUs to use.
832
+ gpu_type : str
833
+ Type of Nvidia GPU to use.
834
+ queue_timeout : int
835
+ Time to keep the job in NVCF's queue.
836
+ """
837
+ ...
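The `@nvidia` stub above only documents its three parameters, so the sketch below simply shows how such a step decorator would be applied. The GPU type string and queue timeout are placeholder values, and the top-level `nvidia` import is assumed from this stub's `__init__.pyi` rather than confirmed against the runtime package.

```python
from metaflow import FlowSpec, nvidia, step


class DgxFlow(FlowSpec):
    # Hypothetical values: one GPU of a placeholder type, queue timeout in seconds.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        print("running on DGX cloud")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DgxFlow()
```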
838
+
839
+ @typing.overload
840
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
841
+ """
842
+ Specifies the resources needed when executing this step.
773
843
 
774
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
775
- ```python
776
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
777
- @step
778
- def pull_model_from_huggingface(self):
779
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
780
- ```
844
+ Use `@resources` to specify the resource requirements
845
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
781
846
 
782
- ```python
783
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
784
- @step
785
- def finetune_model(self):
786
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
787
- # path_to_model will be /my-directory
847
+ You can choose the compute layer on the command line by executing e.g.
788
848
  ```
789
-
790
- ```python
791
- # Takes all the arguments passed to `snapshot_download`
792
- # except for `local_dir`
793
- @huggingface_hub(load=[
794
- {
795
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
796
- },
797
- {
798
- "repo_id": "myorg/mistral-lora",
799
- "repo_type": "model",
800
- },
801
- ])
802
- @step
803
- def finetune_model(self):
804
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
805
- # path_to_model will be /my-directory
849
+ python myflow.py run --with batch
850
+ ```
851
+ or
806
852
  ```
853
+ python myflow.py run --with kubernetes
854
+ ```
855
+ which executes the flow on the desired system using the
856
+ requirements specified in `@resources`.
807
857
 
808
858
 
809
859
  Parameters
810
860
  ----------
811
- temp_dir_root : str, optional
812
- The root directory that will hold the temporary directory where objects will be downloaded.
813
-
814
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
815
- The list of repos (models/datasets) to load.
816
-
817
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
818
-
819
- - If repo (model/dataset) is not found in the datastore:
820
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
821
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
822
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
823
-
824
- - If repo is found in the datastore:
825
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
861
+ cpu : int, default 1
862
+ Number of CPUs required for this step.
863
+ gpu : int, optional, default None
864
+ Number of GPUs required for this step.
865
+ disk : int, optional, default None
866
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
867
+ memory : int, default 4096
868
+ Memory size (in MB) required for this step.
869
+ shared_memory : int, optional, default None
870
+ The value for the size (in MiB) of the /dev/shm volume for this step.
871
+ This parameter maps to the `--shm-size` option in Docker.
826
872
  """
827
873
  ...
828
874
 
829
875
  @typing.overload
830
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
831
- """
832
- Internal decorator to support Fast bakery
833
- """
876
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
877
  ...
835
878
 
836
879
  @typing.overload
837
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
838
- ...
839
-
840
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
841
- """
842
- Internal decorator to support Fast bakery
843
- """
880
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
844
881
  ...
845
882
 
846
- @typing.overload
847
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
883
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
848
884
  """
849
- Decorator prototype for all step decorators. This function gets specialized
850
- and imported for all decorators types by _import_plugin_decorators().
885
+ Specifies the resources needed when executing this step.
886
+
887
+ Use `@resources` to specify the resource requirements
888
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
889
+
890
+ You can choose the compute layer on the command line by executing e.g.
891
+ ```
892
+ python myflow.py run --with batch
893
+ ```
894
+ or
895
+ ```
896
+ python myflow.py run --with kubernetes
897
+ ```
898
+ which executes the flow on the desired system using the
899
+ requirements specified in `@resources`.
900
+
901
+
902
+ Parameters
903
+ ----------
904
+ cpu : int, default 1
905
+ Number of CPUs required for this step.
906
+ gpu : int, optional, default None
907
+ Number of GPUs required for this step.
908
+ disk : int, optional, default None
909
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
910
+ memory : int, default 4096
911
+ Memory size (in MB) required for this step.
912
+ shared_memory : int, optional, default None
913
+ The value for the size (in MiB) of the /dev/shm volume for this step.
914
+ This parameter maps to the `--shm-size` option in Docker.
851
915
  """
852
916
  ...
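To make the `@resources` contract above concrete, here is a minimal sketch that requests CPU, memory, and shared memory for one step; the flow and artifact names are made up. As the docstring notes, the same requirements can be routed to a cluster at run time, e.g. `python myflow.py run --with kubernetes`.

```python
from metaflow import FlowSpec, resources, step


class ResourcesFlow(FlowSpec):
    # Requests 2 CPUs, 8 GB of memory and a 1 GiB /dev/shm volume for this step.
    # The compute layer itself is chosen on the command line (`--with batch/kubernetes`).
    @resources(cpu=2, memory=8192, shared_memory=1024)
    @step
    def start(self):
        self.rows = list(range(1000))
        self.next(self.end)

    @step
    def end(self):
        print(len(self.rows), "rows processed")


if __name__ == "__main__":
    ResourcesFlow()
```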
853
917
 
854
- @typing.overload
855
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
856
- ...
857
-
858
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
918
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
859
919
  """
860
- Decorator prototype for all step decorators. This function gets specialized
861
- and imported for all decorators types by _import_plugin_decorators().
920
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.
939
+
940
+
941
+ Parameters
942
+ ----------
943
+ model: str
944
+ HuggingFace model identifier to be served by vLLM.
945
+ backend: str
946
+ Determines where and how to run the vLLM process.
947
+ openai_api_server: bool
948
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
949
+ Default is False (uses native engine).
950
+ Set to True for backward compatibility with existing code.
951
+ debug: bool
952
+ Whether to turn on verbose debugging logs.
953
+ card_refresh_interval: int
954
+ Interval in seconds for refreshing the vLLM status card.
955
+ Only used when openai_api_server=True.
956
+ max_retries: int
957
+ Maximum number of retries checking for vLLM server startup.
958
+ Only used when openai_api_server=True.
959
+ retry_alert_frequency: int
960
+ Frequency of alert logs for vLLM server startup retries.
961
+ Only used when openai_api_server=True.
962
+ engine_args : dict
963
+ Additional keyword arguments to pass to the vLLM engine.
964
+ For example, `tensor_parallel_size=2`.
862
965
  """
863
966
  ...
864
967
 
865
968
  @typing.overload
866
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
969
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
867
970
  """
868
- Specifies the Conda environment for the step.
869
-
870
- Information in this decorator will augment any
871
- attributes set in the `@conda_base` flow-level decorator. Hence,
872
- you can use `@conda_base` to set packages required by all
873
- steps and use `@conda` to specify step-specific overrides.
971
+ Specifies environment variables to be set prior to the execution of a step.
874
972
 
875
973
 
876
974
  Parameters
877
975
  ----------
878
- packages : Dict[str, str], default {}
879
- Packages to use for this step. The key is the name of the package
880
- and the value is the version to use.
881
- libraries : Dict[str, str], default {}
882
- Supported for backward compatibility. When used with packages, packages will take precedence.
883
- python : str, optional, default None
884
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
885
- that the version used will correspond to the version of the Python interpreter used to start the run.
886
- disabled : bool, default False
887
- If set to True, disables @conda.
976
+ vars : Dict[str, str], default {}
977
+ Dictionary of environment variables to set.
888
978
  """
889
979
  ...
890
980
 
891
981
  @typing.overload
892
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
982
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
893
983
  ...
894
984
 
895
985
  @typing.overload
896
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
986
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
897
987
  ...
898
988
 
899
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
989
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
900
990
  """
901
- Specifies the Conda environment for the step.
902
-
903
- Information in this decorator will augment any
904
- attributes set in the `@conda_base` flow-level decorator. Hence,
905
- you can use `@conda_base` to set packages required by all
906
- steps and use `@conda` to specify step-specific overrides.
991
+ Specifies environment variables to be set prior to the execution of a step.
907
992
 
908
993
 
909
994
  Parameters
910
995
  ----------
911
- packages : Dict[str, str], default {}
912
- Packages to use for this step. The key is the name of the package
913
- and the value is the version to use.
914
- libraries : Dict[str, str], default {}
915
- Supported for backward compatibility. When used with packages, packages will take precedence.
916
- python : str, optional, default None
917
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
918
- that the version used will correspond to the version of the Python interpreter used to start the run.
919
- disabled : bool, default False
920
- If set to True, disables @conda.
996
+ vars : Dict[str, str], default {}
997
+ Dictionary of environment variables to set.
921
998
  """
922
999
  ...
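A small sketch of the `@environment` decorator documented above; the variable names and values are illustrative only.

```python
import os

from metaflow import FlowSpec, environment, step


class EnvFlow(FlowSpec):
    # These variables are set before the step body runs (placeholder names).
    @environment(vars={"MODEL_STAGE": "dev", "TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        print("stage:", os.environ["MODEL_STAGE"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()
```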
923
1000
 
924
- @typing.overload
925
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1001
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
926
1002
  """
927
- Specifies a timeout for your step.
1003
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
928
1004
 
929
- This decorator is useful if this step may hang indefinitely.
1005
+ > Examples
930
1006
 
931
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
932
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
933
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1007
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
1008
+ ```python
1009
+ @huggingface_hub
1010
+ @step
1011
+ def pull_model_from_huggingface(self):
1012
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1013
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1014
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1015
+ # value of the function is a reference to the model in the backend storage.
1016
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1017
+
1018
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1019
+ self.llama_model = current.huggingface_hub.snapshot_download(
1020
+ repo_id=self.model_id,
1021
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
1022
+ )
1023
+ self.next(self.train)
1024
+ ```
1025
+
1026
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
1027
+ ```python
1028
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
1029
+ @step
1030
+ def pull_model_from_huggingface(self):
1031
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1032
+ ```
1033
+
1034
+ ```python
1035
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
1036
+ @step
1037
+ def finetune_model(self):
1038
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1039
+ # path_to_model will be /my-directory
1040
+ ```
1041
+
1042
+ ```python
1043
+ # Takes all the arguments passed to `snapshot_download`
1044
+ # except for `local_dir`
1045
+ @huggingface_hub(load=[
1046
+ {
1047
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
1048
+ },
1049
+ {
1050
+ "repo_id": "myorg/mistral-lora",
1051
+ "repo_type": "model",
1052
+ },
1053
+ ])
1054
+ @step
1055
+ def finetune_model(self):
1056
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
1057
+ # path_to_model will be /my-directory
1058
+ ```
1059
+
1060
+
1061
+ Parameters
1062
+ ----------
1063
+ temp_dir_root : str, optional
1064
+ The root directory that will hold the temporary directory where objects will be downloaded.
1065
+
1066
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1067
+ The list of repos (models/datasets) to load.
934
1068
 
935
- Note that all the values specified in parameters are added together so if you specify
936
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1069
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
937
1070
 
1071
+ - If repo (model/dataset) is not found in the datastore:
1072
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1073
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1074
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
938
1075
 
939
- Parameters
940
- ----------
941
- seconds : int, default 0
942
- Number of seconds to wait prior to timing out.
943
- minutes : int, default 0
944
- Number of minutes to wait prior to timing out.
945
- hours : int, default 0
946
- Number of hours to wait prior to timing out.
1076
+ - If repo is found in the datastore:
1077
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
947
1078
  """
948
1079
  ...
949
1080
 
950
1081
  @typing.overload
951
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
952
- ...
953
-
954
- @typing.overload
955
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
956
- ...
957
-
958
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1082
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
959
1083
  """
960
- Specifies a timeout for your step.
961
-
962
- This decorator is useful if this step may hang indefinitely.
963
-
964
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
965
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
966
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1084
+ Creates a human-readable report, a Metaflow Card, after this step completes.
967
1085
 
968
- Note that all the values specified in parameters are added together so if you specify
969
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1086
+ Note that you may add multiple `@card` decorators in a step with different parameters.
970
1087
 
971
1088
 
972
1089
  Parameters
973
1090
  ----------
974
- seconds : int, default 0
975
- Number of seconds to wait prior to timing out.
976
- minutes : int, default 0
977
- Number of minutes to wait prior to timing out.
978
- hours : int, default 0
979
- Number of hours to wait prior to timing out.
1091
+ type : str, default 'default'
1092
+ Card type.
1093
+ id : str, optional, default None
1094
+ If multiple cards are present, use this id to identify this card.
1095
+ options : Dict[str, Any], default {}
1096
+ Options passed to the card. The contents depend on the card type.
1097
+ timeout : int, default 45
1098
+ Interrupt reporting if it takes more than this many seconds.
980
1099
  """
981
1100
  ...
982
1101
 
983
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
984
- """
985
- Specifies that this step should execute on DGX cloud.
986
-
987
-
988
- Parameters
989
- ----------
990
- gpu : int
991
- Number of GPUs to use.
992
- gpu_type : str
993
- Type of Nvidia GPU to use.
994
- """
1102
+ @typing.overload
1103
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
995
1104
  ...
996
1105
 
997
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1106
+ @typing.overload
1107
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1108
+ ...
1109
+
1110
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
998
1111
  """
999
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1000
-
1001
- User code call
1002
- --------------
1003
- @vllm(
1004
- model="...",
1005
- ...
1006
- )
1007
-
1008
- Valid backend options
1009
- ---------------------
1010
- - 'local': Run as a separate process on the local task machine.
1011
-
1012
- Valid model options
1013
- -------------------
1014
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1112
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1015
1113
 
1016
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1017
- If you need multiple models, you must create multiple @vllm decorators.
1114
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1018
1115
 
1019
1116
 
1020
1117
  Parameters
1021
1118
  ----------
1022
- model: str
1023
- HuggingFace model identifier to be served by vLLM.
1024
- backend: str
1025
- Determines where and how to run the vLLM process.
1026
- openai_api_server: bool
1027
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1028
- Default is False (uses native engine).
1029
- Set to True for backward compatibility with existing code.
1030
- debug: bool
1031
- Whether to turn on verbose debugging logs.
1032
- card_refresh_interval: int
1033
- Interval in seconds for refreshing the vLLM status card.
1034
- Only used when openai_api_server=True.
1035
- max_retries: int
1036
- Maximum number of retries checking for vLLM server startup.
1037
- Only used when openai_api_server=True.
1038
- retry_alert_frequency: int
1039
- Frequency of alert logs for vLLM server startup retries.
1040
- Only used when openai_api_server=True.
1041
- engine_args : dict
1042
- Additional keyword arguments to pass to the vLLM engine.
1043
- For example, `tensor_parallel_size=2`.
1119
+ type : str, default 'default'
1120
+ Card type.
1121
+ id : str, optional, default None
1122
+ If multiple cards are present, use this id to identify this card.
1123
+ options : Dict[str, Any], default {}
1124
+ Options passed to the card. The contents depend on the card type.
1125
+ timeout : int, default 45
1126
+ Interrupt reporting if it takes more than this many seconds.
1044
1127
  """
1045
1128
  ...
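As an illustration of the `@card` parameters above, the sketch below attaches two cards to the same step and distinguishes them by `id`; the ids, timeout, and recorded metric are placeholders. The resulting cards can later be inspected through Metaflow's card tooling.

```python
from metaflow import FlowSpec, card, step


class CardFlow(FlowSpec):
    # Two cards on one step, told apart by `id`; the default card type is used.
    @card(type="default", id="summary", timeout=60)
    @card(type="default", id="details")
    @step
    def start(self):
        self.metric = 0.93
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardFlow()
```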
1046
1129
 
1047
1130
  @typing.overload
1048
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1131
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1049
1132
  """
1050
- Specifies the number of times the task corresponding
1051
- to a step needs to be retried.
1052
-
1053
- This decorator is useful for handling transient errors, such as networking issues.
1054
- If your task contains operations that can't be retried safely, e.g. database updates,
1055
- it is advisable to annotate it with `@retry(times=0)`.
1133
+ Specifies that the step will succeed under all circumstances.
1056
1134
 
1057
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1058
- decorator will execute a no-op task after all retries have been exhausted,
1059
- ensuring that the flow execution can continue.
1135
+ The decorator will create an optional artifact, specified by `var`, which
1136
+ contains the exception raised. You can use it to detect the presence
1137
+ of errors, indicating that all happy-path artifacts produced by the step
1138
+ are missing.
1060
1139
 
1061
1140
 
1062
1141
  Parameters
1063
1142
  ----------
1064
- times : int, default 3
1065
- Number of times to retry this task.
1066
- minutes_between_retries : int, default 2
1067
- Number of minutes between retries.
1143
+ var : str, optional, default None
1144
+ Name of the artifact in which to store the caught exception.
1145
+ If not specified, the exception is not stored.
1146
+ print_exception : bool, default True
1147
+ Determines whether or not the exception is printed to
1148
+ stdout when caught.
1068
1149
  """
1069
1150
  ...
1070
1151
 
1071
1152
  @typing.overload
1072
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1153
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1073
1154
  ...
1074
1155
 
1075
1156
  @typing.overload
1076
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1157
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1077
1158
  ...
1078
1159
 
1079
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1160
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1080
1161
  """
1081
- Specifies the number of times the task corresponding
1082
- to a step needs to be retried.
1083
-
1084
- This decorator is useful for handling transient errors, such as networking issues.
1085
- If your task contains operations that can't be retried safely, e.g. database updates,
1086
- it is advisable to annotate it with `@retry(times=0)`.
1162
+ Specifies that the step will succeed under all circumstances.
1087
1163
 
1088
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1089
- decorator will execute a no-op task after all retries have been exhausted,
1090
- ensuring that the flow execution can continue.
1164
+ The decorator will create an optional artifact, specified by `var`, which
1165
+ contains the exception raised. You can use it to detect the presence
1166
+ of errors, indicating that all happy-path artifacts produced by the step
1167
+ are missing.
1091
1168
 
1092
1169
 
1093
1170
  Parameters
1094
1171
  ----------
1095
- times : int, default 3
1096
- Number of times to retry this task.
1097
- minutes_between_retries : int, default 2
1098
- Number of minutes between retries.
1172
+ var : str, optional, default None
1173
+ Name of the artifact in which to store the caught exception.
1174
+ If not specified, the exception is not stored.
1175
+ print_exception : bool, default True
1176
+ Determines whether or not the exception is printed to
1177
+ stdout when caught.
1099
1178
  """
1100
1179
  ...
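The `@catch` and `@retry` docstrings above describe how the two compose: retries run first, and only after they are exhausted does `@catch` record the exception in the named artifact. A minimal sketch with hypothetical names:

```python
from metaflow import FlowSpec, catch, retry, step


class RobustFlow(FlowSpec):
    # Retry twice on transient failures; if all attempts fail, store the
    # exception in `self.compute_error` instead of failing the run.
    @catch(var="compute_error")
    @retry(times=2, minutes_between_retries=1)
    @step
    def start(self):
        self.result = 1 / 1  # stand-in for real work that may fail
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_error", None):
            print("start step failed:", self.compute_error)
        else:
            print("result =", self.result)


if __name__ == "__main__":
    RobustFlow()
```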
1101
1180
 
@@ -1138,85 +1217,6 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1138
1217
  """
1139
1218
  ...
1140
1219
 
1141
- @typing.overload
1142
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1143
- """
1144
- Specifies the resources needed when executing this step.
1145
-
1146
- Use `@resources` to specify the resource requirements
1147
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1148
-
1149
- You can choose the compute layer on the command line by executing e.g.
1150
- ```
1151
- python myflow.py run --with batch
1152
- ```
1153
- or
1154
- ```
1155
- python myflow.py run --with kubernetes
1156
- ```
1157
- which executes the flow on the desired system using the
1158
- requirements specified in `@resources`.
1159
-
1160
-
1161
- Parameters
1162
- ----------
1163
- cpu : int, default 1
1164
- Number of CPUs required for this step.
1165
- gpu : int, optional, default None
1166
- Number of GPUs required for this step.
1167
- disk : int, optional, default None
1168
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1169
- memory : int, default 4096
1170
- Memory size (in MB) required for this step.
1171
- shared_memory : int, optional, default None
1172
- The value for the size (in MiB) of the /dev/shm volume for this step.
1173
- This parameter maps to the `--shm-size` option in Docker.
1174
- """
1175
- ...
1176
-
1177
- @typing.overload
1178
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1179
- ...
1180
-
1181
- @typing.overload
1182
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1183
- ...
1184
-
1185
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1186
- """
1187
- Specifies the resources needed when executing this step.
1188
-
1189
- Use `@resources` to specify the resource requirements
1190
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1191
-
1192
- You can choose the compute layer on the command line by executing e.g.
1193
- ```
1194
- python myflow.py run --with batch
1195
- ```
1196
- or
1197
- ```
1198
- python myflow.py run --with kubernetes
1199
- ```
1200
- which executes the flow on the desired system using the
1201
- requirements specified in `@resources`.
1202
-
1203
-
1204
- Parameters
1205
- ----------
1206
- cpu : int, default 1
1207
- Number of CPUs required for this step.
1208
- gpu : int, optional, default None
1209
- Number of GPUs required for this step.
1210
- disk : int, optional, default None
1211
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1212
- memory : int, default 4096
1213
- Memory size (in MB) required for this step.
1214
- shared_memory : int, optional, default None
1215
- The value for the size (in MiB) of the /dev/shm volume for this step.
1216
- This parameter maps to the `--shm-size` option in Docker.
1217
- """
1218
- ...
1219
-
1220
1220
  def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1221
1221
  """
1222
1222
  This decorator is used to run Ollama APIs as Metaflow task sidecars.
@@ -1261,53 +1261,154 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
1261
1261
  ...
1262
1262
 
1263
1263
  @typing.overload
1264
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1264
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1265
1265
  """
1266
- Specifies the Conda environment for all steps of the flow.
1266
+ Specifies the flow(s) that this flow depends on.
1267
1267
 
1268
- Use `@conda_base` to set common libraries required by all
1269
- steps and use `@conda` to specify step-specific additions.
1268
+ ```
1269
+ @trigger_on_finish(flow='FooFlow')
1270
+ ```
1271
+ or
1272
+ ```
1273
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1274
+ ```
1275
+ This decorator respects the @project decorator and triggers the flow
1276
+ when upstream runs within the same namespace complete successfully
1277
+
1278
+ Additionally, you can specify project aware upstream flow dependencies
1279
+ by specifying the fully qualified project_flow_name.
1280
+ ```
1281
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1282
+ ```
1283
+ or
1284
+ ```
1285
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1286
+ ```
1287
+
1288
+ You can also specify just the project or project branch (other values will be
1289
+ inferred from the current project or project branch):
1290
+ ```
1291
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1292
+ ```
1293
+
1294
+ Note that `branch` is typically one of:
1295
+ - `prod`
1296
+ - `user.bob`
1297
+ - `test.my_experiment`
1298
+ - `prod.staging`
1270
1299
 
1271
1300
 
1272
1301
  Parameters
1273
1302
  ----------
1274
- packages : Dict[str, str], default {}
1275
- Packages to use for this flow. The key is the name of the package
1276
- and the value is the version to use.
1277
- libraries : Dict[str, str], default {}
1278
- Supported for backward compatibility. When used with packages, packages will take precedence.
1279
- python : str, optional, default None
1280
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1281
- that the version used will correspond to the version of the Python interpreter used to start the run.
1282
- disabled : bool, default False
1283
- If set to True, disables Conda.
1303
+ flow : Union[str, Dict[str, str]], optional, default None
1304
+ Upstream flow dependency for this flow.
1305
+ flows : List[Union[str, Dict[str, str]]], default []
1306
+ Upstream flow dependencies for this flow.
1307
+ options : Dict[str, Any], default {}
1308
+ Backend-specific configuration for tuning eventing behavior.
1284
1309
  """
1285
1310
  ...
1286
1311
 
1287
1312
  @typing.overload
1288
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1313
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1289
1314
  ...
1290
1315
 
1291
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1316
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1292
1317
  """
1293
- Specifies the Conda environment for all steps of the flow.
1318
+ Specifies the flow(s) that this flow depends on.
1294
1319
 
1295
- Use `@conda_base` to set common libraries required by all
1296
- steps and use `@conda` to specify step-specific additions.
1320
+ ```
1321
+ @trigger_on_finish(flow='FooFlow')
1322
+ ```
1323
+ or
1324
+ ```
1325
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1326
+ ```
1327
+ This decorator respects the @project decorator and triggers the flow
1328
+ when upstream runs within the same namespace complete successfully
1329
+
1330
+ Additionally, you can specify project aware upstream flow dependencies
1331
+ by specifying the fully qualified project_flow_name.
1332
+ ```
1333
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1334
+ ```
1335
+ or
1336
+ ```
1337
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1338
+ ```
1339
+
1340
+ You can also specify just the project or project branch (other values will be
1341
+ inferred from the current project or project branch):
1342
+ ```
1343
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1344
+ ```
1345
+
1346
+ Note that `branch` is typically one of:
1347
+ - `prod`
1348
+ - `user.bob`
1349
+ - `test.my_experiment`
1350
+ - `prod.staging`
1297
1351
 
1298
1352
 
1299
1353
  Parameters
1300
1354
  ----------
1301
- packages : Dict[str, str], default {}
1302
- Packages to use for this flow. The key is the name of the package
1303
- and the value is the version to use.
1304
- libraries : Dict[str, str], default {}
1305
- Supported for backward compatibility. When used with packages, packages will take precedence.
1306
- python : str, optional, default None
1307
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1308
- that the version used will correspond to the version of the Python interpreter used to start the run.
1309
- disabled : bool, default False
1310
- If set to True, disables Conda.
1355
+ flow : Union[str, Dict[str, str]], optional, default None
1356
+ Upstream flow dependency for this flow.
1357
+ flows : List[Union[str, Dict[str, str]]], default []
1358
+ Upstream flow dependencies for this flow.
1359
+ options : Dict[str, Any], default {}
1360
+ Backend-specific configuration for tuning eventing behavior.
1361
+ """
1362
+ ...
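A sketch of the flow-level `@trigger_on_finish` usage documented above; `FooFlow` follows the docstring's own example, while the downstream flow name is made up. The trigger only takes effect once the flow is deployed to a production orchestrator.

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):
    # When deployed, this flow starts after a successful run of FooFlow
    # in the same (project-aware) namespace finishes.
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```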
1363
+
1364
+ @typing.overload
1365
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1366
+ """
1367
+ Specifies the times when the flow should be run when running on a
1368
+ production scheduler.
1369
+
1370
+
1371
+ Parameters
1372
+ ----------
1373
+ hourly : bool, default False
1374
+ Run the workflow hourly.
1375
+ daily : bool, default True
1376
+ Run the workflow daily.
1377
+ weekly : bool, default False
1378
+ Run the workflow weekly.
1379
+ cron : str, optional, default None
1380
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1381
+ specified by this expression.
1382
+ timezone : str, optional, default None
1383
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1384
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1385
+ """
1386
+ ...
1387
+
1388
+ @typing.overload
1389
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1390
+ ...
1391
+
1392
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1393
+ """
1394
+ Specifies the times when the flow should be run when running on a
1395
+ production scheduler.
1396
+
1397
+
1398
+ Parameters
1399
+ ----------
1400
+ hourly : bool, default False
1401
+ Run the workflow hourly.
1402
+ daily : bool, default True
1403
+ Run the workflow daily.
1404
+ weekly : bool, default False
1405
+ Run the workflow weekly.
1406
+ cron : str, optional, default None
1407
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1408
+ specified by this expression.
1409
+ timezone : str, optional, default None
1410
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1411
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1311
1412
  """
1312
1413
  ...
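A minimal sketch of `@schedule` with a cron expression, as documented above. The stub does not say whether the `daily=True` default must be explicitly disabled when `cron` is given, so treat this purely as an illustration; the schedule itself is a placeholder.

```python
from metaflow import FlowSpec, schedule, step


@schedule(cron="0 6 * * *")
class NightlyFlow(FlowSpec):
    # When deployed to a production scheduler, runs at 06:00 per the cron
    # expression above (placeholder schedule).
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```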
1313
1414
 
@@ -1425,130 +1526,132 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1425
1526
  """
1426
1527
  ...
1427
1528
 
1428
- @typing.overload
1429
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1529
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1430
1530
  """
1431
- Specifies the times when the flow should be run when running on a
1432
- production scheduler.
1531
+ Specifies what flows belong to the same project.
1532
+
1533
+ A project-specific namespace is created for all flows that
1534
+ use the same `@project(name)`.
1433
1535
 
1434
1536
 
1435
1537
  Parameters
1436
1538
  ----------
1437
- hourly : bool, default False
1438
- Run the workflow hourly.
1439
- daily : bool, default True
1440
- Run the workflow daily.
1441
- weekly : bool, default False
1442
- Run the workflow weekly.
1443
- cron : str, optional, default None
1444
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1445
- specified by this expression.
1446
- timezone : str, optional, default None
1447
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1448
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1539
+ name : str
1540
+ Project name. Make sure that the name is unique amongst all
1541
+ projects that use the same production scheduler. The name may
1542
+ contain only lowercase alphanumeric characters and underscores.
1543
+
1544
+ branch : Optional[str], default None
1545
+ The branch to use. If not specified, the branch is set to
1546
+ `user.<username>` unless `production` is set to `True`. This can
1547
+ also be set on the command line using `--branch` as a top-level option.
1548
+ It is an error to specify `branch` in the decorator and on the command line.
1549
+
1550
+ production : bool, default False
1551
+ Whether or not the branch is the production branch. This can also be set on the
1552
+ command line using `--production` as a top-level option. It is an error to specify
1553
+ `production` in the decorator and on the command line.
1554
+ The project branch name will be:
1555
+ - if `branch` is specified:
1556
+ - if `production` is True: `prod.<branch>`
1557
+ - if `production` is False: `test.<branch>`
1558
+ - if `branch` is not specified:
1559
+ - if `production` is True: `prod`
1560
+ - if `production` is False: `user.<username>`
1449
1561
  """
1450
1562
  ...
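To make the branch-naming rules above concrete, here is a sketch with a placeholder project name. Per the docstring, deploying it without extra options yields a `user.<username>` branch, while `--production` (with no `--branch`) switches the namespace to `prod`.

```python
from metaflow import FlowSpec, project, step


@project(name="demo_project")
class ProjectedFlow(FlowSpec):
    # Without --branch/--production the effective branch is user.<username>;
    # with --production (and no branch) it becomes prod.
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectedFlow()
```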
1451
1563
 
1452
- @typing.overload
1453
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1454
- ...
1455
-
1456
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1564
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1457
1565
  """
1458
- Specifies the times when the flow should be run when running on a
1459
- production scheduler.
1566
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1460
1568
 
1461
1569
 
1462
1570
  Parameters
1463
1571
  ----------
1464
- hourly : bool, default False
1465
- Run the workflow hourly.
1466
- daily : bool, default True
1467
- Run the workflow daily.
1468
- weekly : bool, default False
1469
- Run the workflow weekly.
1470
- cron : str, optional, default None
1471
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1472
- specified by this expression.
1473
- timezone : str, optional, default None
1474
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1475
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1572
+ timeout : int
1573
+ Time, in seconds before the task times out and fails. (Default: 3600)
1574
+ poke_interval : int
1575
+ Time in seconds that the job should wait in between each try. (Default: 60)
1576
+ mode : str
1577
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1578
+ exponential_backoff : bool
1579
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1580
+ pool : str
1581
+ the slot pool this task should run in,
1582
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1583
+ soft_fail : bool
1584
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1585
+ name : str
1586
+ Name of the sensor on Airflow
1587
+ description : str
1588
+ Description of sensor in the Airflow UI
1589
+ external_dag_id : str
1590
+ The dag_id that contains the task you want to wait for.
1591
+ external_task_ids : List[str]
1592
+ The list of task_ids that you want to wait for.
1593
+ If None (default value) the sensor waits for the DAG. (Default: None)
1594
+ allowed_states : List[str]
1595
+ Iterable of allowed states, (Default: ['success'])
1596
+ failed_states : List[str]
1597
+ Iterable of failed or dis-allowed states. (Default: None)
1598
+ execution_delta : datetime.timedelta
1599
+ time difference with the previous execution to look at,
1600
+ the default is the same logical date as the current task or DAG. (Default: None)
1601
+ check_existence: bool
1602
+ Set to True to check if the external task exists or check if
1603
+ the DAG to wait for exists. (Default: True)
1476
1604
  """
1477
1605
  ...
1478
1606
 
1479
1607
  @typing.overload
1480
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1608
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1481
1609
  """
1482
- Specifies the PyPI packages for all steps of the flow.
1610
+ Specifies the Conda environment for all steps of the flow.
1611
+
1612
+ Use `@conda_base` to set common libraries required by all
1613
+ steps and use `@conda` to specify step-specific additions.
1483
1614
 
1484
- Use `@pypi_base` to set common packages required by all
1485
- steps and use `@pypi` to specify step-specific overrides.
1486
1615
 
1487
1616
  Parameters
1488
1617
  ----------
1489
- packages : Dict[str, str], default: {}
1618
+ packages : Dict[str, str], default {}
1490
1619
  Packages to use for this flow. The key is the name of the package
1491
1620
  and the value is the version to use.
1492
- python : str, optional, default: None
1621
+ libraries : Dict[str, str], default {}
1622
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1623
+ python : str, optional, default None
1493
1624
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1494
1625
  that the version used will correspond to the version of the Python interpreter used to start the run.
1626
+ disabled : bool, default False
1627
+ If set to True, disables Conda.
1495
1628
  """
1496
1629
  ...
1497
1630
 
1498
1631
  @typing.overload
1499
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1632
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1500
1633
  ...
1501
1634
 
1502
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1635
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1503
1636
  """
1504
- Specifies the PyPI packages for all steps of the flow.
1637
+ Specifies the Conda environment for all steps of the flow.
1638
+
1639
+ Use `@conda_base` to set common libraries required by all
1640
+ steps and use `@conda` to specify step-specific additions.
1505
1641
 
1506
- Use `@pypi_base` to set common packages required by all
1507
- steps and use `@pypi` to specify step-specific overrides.
1508
1642
 
1509
1643
  Parameters
1510
1644
  ----------
1511
- packages : Dict[str, str], default: {}
1645
+ packages : Dict[str, str], default {}
1512
1646
  Packages to use for this flow. The key is the name of the package
1513
1647
  and the value is the version to use.
1514
- python : str, optional, default: None
1648
+ libraries : Dict[str, str], default {}
1649
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1650
+ python : str, optional, default None
1515
1651
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1516
1652
  that the version used will correspond to the version of the Python interpreter used to start the run.
1517
- """
1518
- ...
1519
-
1520
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1521
- """
1522
- Specifies what flows belong to the same project.
1523
-
1524
- A project-specific namespace is created for all flows that
1525
- use the same `@project(name)`.
1526
-
1527
-
1528
- Parameters
1529
- ----------
1530
- name : str
1531
- Project name. Make sure that the name is unique amongst all
1532
- projects that use the same production scheduler. The name may
1533
- contain only lowercase alphanumeric characters and underscores.
1534
-
1535
- branch : Optional[str], default None
1536
- The branch to use. If not specified, the branch is set to
1537
- `user.<username>` unless `production` is set to `True`. This can
1538
- also be set on the command line using `--branch` as a top-level option.
1539
- It is an error to specify `branch` in the decorator and on the command line.
1540
-
1541
- production : bool, default False
1542
- Whether or not the branch is the production branch. This can also be set on the
1543
- command line using `--production` as a top-level option. It is an error to specify
1544
- `production` in the decorator and on the command line.
1545
- The project branch name will be:
1546
- - if `branch` is specified:
1547
- - if `production` is True: `prod.<branch>`
1548
- - if `production` is False: `test.<branch>`
1549
- - if `branch` is not specified:
1550
- - if `production` is True: `prod`
1551
- - if `production` is False: `user.<username>`
1653
+ disabled : bool, default False
1654
+ If set to True, disables Conda.
1552
1655
  """
1553
1656
  ...
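Finally, a sketch combining the flow-level `@conda_base` above with the step-level `@conda` override it mentions; package names and versions are placeholders, and the usual `--environment=conda` run-time requirement may apply.

```python
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(packages={"pandas": "2.2.2"}, python="3.10.12")
class CondaBaseFlow(FlowSpec):
    # Every step gets pandas from @conda_base; `start` additionally pins numpy.
    @conda(packages={"numpy": "1.26.4"})
    @step
    def start(self):
        import numpy as np
        import pandas as pd
        self.df_rows = len(pd.DataFrame({"x": np.arange(3)}))
        self.next(self.end)

    @step
    def end(self):
        print(self.df_rows, "rows")


if __name__ == "__main__":
    CondaBaseFlow()
```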
1554
1657
 
@@ -1645,150 +1748,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
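The overloads removed above document `@trigger_on_finish`, which wires a flow to start when an upstream flow completes. A hedged sketch of a downstream flow, with both flow names invented for illustration:

```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")            # start after FooFlow succeeds in the same namespace
class ReportFlow(FlowSpec):
    @step
    def start(self):
        print("upstream FooFlow finished; building the report")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```

The trigger only takes effect once the flow is deployed to a production scheduler (for example an Argo Workflows deployment); a plain local `run` ignores the decorator.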
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
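The stub deleted above covers `@airflow_external_task_sensor`, which, per its docstring, only applies when the flow is compiled with `airflow create`. A rough sketch using a subset of the documented parameters; the DAG and task ids are placeholders, and the omitted arguments are assumed to fall back to the defaults listed in the docstring:

```
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    name="wait_for_nightly_etl",              # sensor name shown in the Airflow UI
    description="Wait for the warehouse load task",
    external_dag_id="nightly_etl",            # placeholder upstream DAG id
    external_task_ids=["load_warehouse"],     # wait for this task rather than the whole DAG
    timeout=3600,
    poke_interval=60,
    mode="reschedule",                        # free the worker slot between pokes
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```

As the docstring notes, stacking a second sensor decorator holds the `start` step until every sensor has finished.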
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1832,5 +1791,46 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
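The newly added `@pypi_base` overloads mirror `@conda_base` but resolve dependencies from PyPI: flow-wide packages go on the class, per-step overrides go on `@pypi`. A short sketch, with the Python version and package pins chosen purely for illustration:

```
from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(python="3.11.8", packages={"requests": "2.31.0"})   # shared by all steps
class FetchFlow(FlowSpec):
    @step
    def start(self):
        import requests                        # provided by the @pypi_base environment
        self.status = requests.get("https://example.com").status_code
        self.next(self.end)

    @pypi(packages={"rich": "13.7.1"})         # step-specific override, as the docstring suggests
    @step
    def end(self):
        from rich import print as rprint
        rprint({"status": self.status})

if __name__ == "__main__":
    FetchFlow()
```

The isolated environments are typically activated by passing `--environment=pypi` on the run command; without it the decorators have no effect and the ambient interpreter is used.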
  pkg_name: str