ob-metaflow-stubs 6.0.9.2__py2.py3-none-any.whl → 6.0.9.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release.

This version of ob-metaflow-stubs might be problematic.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +987 -987
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +49 -49
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +5 -5
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +3 -3
  125. metaflow-stubs/plugins/__init__.pyi +8 -8
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +6 -6
  165. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +2 -2
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +3 -3
  251. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  258. {ob_metaflow_stubs-6.0.9.2.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.9.4.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.2.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.2.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.2.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.1.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-29T18:32:22.531594 #
+ # MF version: 2.18.2.1+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-09-03T10:45:51.965005 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -167,6 +167,74 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -219,270 +287,216 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Enables checkpointing for a step.

+ > Examples

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables loading / saving of models within a step.
+ - Saving Checkpoints

- > Examples
- - Saving Models
  ```python
- @model
+ @checkpoint
  @step
  def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
+ model = create_model(self.parameters, checkpoint_path = None)
+ for i in range(self.epochs):
+ # some training logic
+ loss = model.train(self.dataset)
+ if i % 10 == 0:
+ model.save(
+ current.checkpoint.directory,
+ )
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
+ # and returns a reference dictionary to the checkpoint saved in the datastore
+ self.latest_checkpoint = current.checkpoint.save(
+ name="epoch_checkpoint",
+ metadata={
+ "epoch": i,
+ "loss": loss,
+ }
+ )
  ```

- - Loading models
+ - Using Loaded Checkpoints
+
  ```python
+ @retry(times=3)
+ @checkpoint
  @step
  def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
+ # Assume that the task has restarted and the previous attempt of the task
+ # saved a checkpoint
+ checkpoint_path = None
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
+ print("Loaded checkpoint from the previous attempt")
+ checkpoint_path = current.checkpoint.directory
+
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+ for i in range(self.epochs):
+ ...
  ```


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.

  temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Enables loading / saving of models within a step.
+ Enables checkpointing for a step.

  > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)

- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
+ - Saving Checkpoints

- - Loading models
  ```python
+ @checkpoint
  @step
  def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
+ model = create_model(self.parameters, checkpoint_path = None)
+ for i in range(self.epochs):
+ # some training logic
+ loss = model.train(self.dataset)
+ if i % 10 == 0:
+ model.save(
+ current.checkpoint.directory,
+ )
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
+ # and returns a reference dictionary to the checkpoint saved in the datastore
+ self.latest_checkpoint = current.checkpoint.save(
+ name="epoch_checkpoint",
+ metadata={
+ "epoch": i,
+ "loss": loss,
+ }
+ )
+ ```
+
+ - Using Loaded Checkpoints
+
+ ```python
+ @retry(times=3)
+ @checkpoint
+ @step
+ def train(self):
+ # Assume that the task has restarted and the previous attempt of the task
+ # saved a checkpoint
+ checkpoint_path = None
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
+ print("Loaded checkpoint from the previous attempt")
+ checkpoint_path = current.checkpoint.directory
+
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
+ for i in range(self.epochs):
+ ...
  ```


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.

  temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- S3 Proxy decorator for routing S3 requests through a local proxy service.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- integration_name : str, optional
- Name of the S3 proxy integration. If not specified, will use the only
- available S3 proxy integration in the namespace (fails if multiple exist).
- write_mode : str, optional
- The desired behavior during write operations to target (origin) S3 bucket.
- allowed options are:
- "origin-and-cache" -> write to both the target S3 bucket and local object
- storage
- "origin" -> only write to the target S3 bucket
- "cache" -> only write to the object storage service used for caching
- debug : bool, optional
- Enable debug logging for proxy operations.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
- """
- ...
-
- @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
@@ -505,434 +519,229 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
505
519
  """
506
520
  ...
507
521
 
508
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
509
- """
510
- Specifies that this step should execute on DGX cloud.
511
-
512
-
513
- Parameters
514
- ----------
515
- gpu : int
516
- Number of GPUs to use.
517
- gpu_type : str
518
- Type of Nvidia GPU to use.
519
- """
520
- ...
521
-
522
522
  @typing.overload
523
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
523
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
524
524
  """
525
- Creates a human-readable report, a Metaflow Card, after this step completes.
526
-
527
- Note that you may add multiple `@card` decorators in a step with different parameters.
528
-
529
-
530
- Parameters
531
- ----------
532
- type : str, default 'default'
533
- Card type.
534
- id : str, optional, default None
535
- If multiple cards are present, use this id to identify this card.
536
- options : Dict[str, Any], default {}
537
- Options passed to the card. The contents depend on the card type.
538
- timeout : int, default 45
539
- Interrupt reporting if it takes more than this many seconds.
525
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
526
+ It exists to make it easier for users to know that this decorator should only be used with
527
+ a Neo Cloud like CoreWeave.
540
528
  """
541
529
  ...
542
530
 
543
531
  @typing.overload
544
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
545
- ...
546
-
547
- @typing.overload
548
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
532
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
549
533
  ...
550
534
 
551
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
535
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
552
536
  """
553
- Creates a human-readable report, a Metaflow Card, after this step completes.
554
-
555
- Note that you may add multiple `@card` decorators in a step with different parameters.
556
-
557
-
558
- Parameters
559
- ----------
560
- type : str, default 'default'
561
- Card type.
562
- id : str, optional, default None
563
- If multiple cards are present, use this id to identify this card.
564
- options : Dict[str, Any], default {}
565
- Options passed to the card. The contents depend on the card type.
566
- timeout : int, default 45
567
- Interrupt reporting if it takes more than this many seconds.
537
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
538
+ It exists to make it easier for users to know that this decorator should only be used with
539
+ a Neo Cloud like CoreWeave.
568
540
  """
569
541
  ...
570
542
 
571
543
  @typing.overload
572
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
544
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
573
545
  """
574
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
546
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
575
547
  It exists to make it easier for users to know that this decorator should only be used with
576
- a Neo Cloud like CoreWeave.
548
+ a Neo Cloud like Nebius.
577
549
  """
578
550
  ...
579
551
 
580
552
  @typing.overload
581
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
553
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
582
554
  ...
583
555
 
584
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
556
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
585
557
  """
586
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
558
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
587
559
  It exists to make it easier for users to know that this decorator should only be used with
588
- a Neo Cloud like CoreWeave.
560
+ a Neo Cloud like Nebius.
589
561
  """
590
562
  ...
591
563
 
592
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
564
+ @typing.overload
565
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
593
566
  """
594
- Specifies that this step should execute on DGX cloud.
567
+ Specifies the number of times the task corresponding
568
+ to a step needs to be retried.
569
+
570
+ This decorator is useful for handling transient errors, such as networking issues.
571
+ If your task contains operations that can't be retried safely, e.g. database updates,
572
+ it is advisable to annotate it with `@retry(times=0)`.
573
+
574
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
575
+ decorator will execute a no-op task after all retries have been exhausted,
576
+ ensuring that the flow execution can continue.
595
577
 
596
578
 
597
579
  Parameters
598
580
  ----------
599
- gpu : int
600
- Number of GPUs to use.
601
- gpu_type : str
602
- Type of Nvidia GPU to use.
603
- queue_timeout : int
604
- Time to keep the job in NVCF's queue.
581
+ times : int, default 3
582
+ Number of times to retry this task.
583
+ minutes_between_retries : int, default 2
584
+ Number of minutes between retries.
605
585
  """
606
586
  ...
607
587
 
608
588
  @typing.overload
609
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
589
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
590
+ ...
591
+
592
+ @typing.overload
593
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
594
+ ...
595
+
596
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
610
597
  """
611
- Enables checkpointing for a step.
612
-
613
- > Examples
598
+ Specifies the number of times the task corresponding
599
+ to a step needs to be retried.
614
600
 
615
- - Saving Checkpoints
616
-
617
- ```python
618
- @checkpoint
619
- @step
620
- def train(self):
621
- model = create_model(self.parameters, checkpoint_path = None)
622
- for i in range(self.epochs):
623
- # some training logic
624
- loss = model.train(self.dataset)
625
- if i % 10 == 0:
626
- model.save(
627
- current.checkpoint.directory,
628
- )
629
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
630
- # and returns a reference dictionary to the checkpoint saved in the datastore
631
- self.latest_checkpoint = current.checkpoint.save(
632
- name="epoch_checkpoint",
633
- metadata={
634
- "epoch": i,
635
- "loss": loss,
636
- }
637
- )
638
- ```
639
-
640
- - Using Loaded Checkpoints
641
-
642
- ```python
643
- @retry(times=3)
644
- @checkpoint
645
- @step
646
- def train(self):
647
- # Assume that the task has restarted and the previous attempt of the task
648
- # saved a checkpoint
649
- checkpoint_path = None
650
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
651
- print("Loaded checkpoint from the previous attempt")
652
- checkpoint_path = current.checkpoint.directory
601
+ This decorator is useful for handling transient errors, such as networking issues.
602
+ If your task contains operations that can't be retried safely, e.g. database updates,
603
+ it is advisable to annotate it with `@retry(times=0)`.
653
604
 
654
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
655
- for i in range(self.epochs):
656
- ...
657
- ```
605
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
606
+ decorator will execute a no-op task after all retries have been exhausted,
607
+ ensuring that the flow execution can continue.
658
608
 
659
609
 
660
610
  Parameters
661
611
  ----------
662
- load_policy : str, default: "fresh"
663
- The policy for loading the checkpoint. The following policies are supported:
664
- - "eager": Loads the the latest available checkpoint within the namespace.
665
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
666
- will be loaded at the start of the task.
667
- - "none": Do not load any checkpoint
668
- - "fresh": Loads the lastest checkpoint created within the running Task.
669
- This mode helps loading checkpoints across various retry attempts of the same task.
670
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
671
- created within the task will be loaded when the task is retries execution on failure.
672
-
673
- temp_dir_root : str, default: None
674
- The root directory under which `current.checkpoint.directory` will be created.
612
+ times : int, default 3
613
+ Number of times to retry this task.
614
+ minutes_between_retries : int, default 2
615
+ Number of minutes between retries.
675
616
  """
676
617
  ...
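For orientation, a minimal sketch of how the `@retry` decorator described above is applied to a step; the flow and step names are illustrative and not part of the stubs:

```python
from metaflow import FlowSpec, retry, step


class RetryDemoFlow(FlowSpec):

    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        # Anything transient placed here (e.g. a network call) is
        # re-attempted up to 3 times, 2 minutes apart, before failing.
        self.attempts_allowed = 3
        self.next(self.end)

    @step
    def end(self):
        print("done after at most", self.attempts_allowed, "retries")


if __name__ == "__main__":
    RetryDemoFlow()
```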
677
618
 
678
619
  @typing.overload
679
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
680
- ...
681
-
682
- @typing.overload
683
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
684
- ...
685
-
686
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
620
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
687
621
  """
688
- Enables checkpointing for a step.
622
+ Enables loading / saving of models within a step.
689
623
 
690
624
  > Examples
691
-
692
- - Saving Checkpoints
693
-
625
+ - Saving Models
694
626
  ```python
695
- @checkpoint
627
+ @model
696
628
  @step
697
629
  def train(self):
698
- model = create_model(self.parameters, checkpoint_path = None)
699
- for i in range(self.epochs):
700
- # some training logic
701
- loss = model.train(self.dataset)
702
- if i % 10 == 0:
703
- model.save(
704
- current.checkpoint.directory,
705
- )
706
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
707
- # and returns a reference dictionary to the checkpoint saved in the datastore
708
- self.latest_checkpoint = current.checkpoint.save(
709
- name="epoch_checkpoint",
710
- metadata={
711
- "epoch": i,
712
- "loss": loss,
713
- }
714
- )
715
- ```
630
+ # current.model.save returns a dictionary reference to the model saved
631
+ self.my_model = current.model.save(
632
+ path_to_my_model,
633
+ label="my_model",
634
+ metadata={
635
+ "epochs": 10,
636
+ "batch-size": 32,
637
+ "learning-rate": 0.001,
638
+ }
639
+ )
640
+ self.next(self.test)
716
641
 
717
- - Using Loaded Checkpoints
642
+ @model(load="my_model")
643
+ @step
644
+ def test(self):
645
+ # `current.model.loaded` returns a dictionary of the loaded models
646
+ # where the key is the name of the artifact and the value is the path to the model
647
+ print(os.listdir(current.model.loaded["my_model"]))
648
+ self.next(self.end)
649
+ ```
718
650
 
651
+ - Loading models
719
652
  ```python
720
- @retry(times=3)
721
- @checkpoint
722
653
  @step
723
654
  def train(self):
724
- # Assume that the task has restarted and the previous attempt of the task
725
- # saved a checkpoint
726
- checkpoint_path = None
727
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
728
- print("Loaded checkpoint from the previous attempt")
729
- checkpoint_path = current.checkpoint.directory
730
-
731
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
732
- for i in range(self.epochs):
733
- ...
655
+ # current.model.load returns the path to the model loaded
656
+ checkpoint_path = current.model.load(
657
+ self.checkpoint_key,
658
+ )
659
+ model_path = current.model.load(
660
+ self.model,
661
+ )
662
+ self.next(self.test)
734
663
  ```
735
664
 
736
665
 
737
666
  Parameters
738
667
  ----------
739
- load_policy : str, default: "fresh"
740
- The policy for loading the checkpoint. The following policies are supported:
741
- - "eager": Loads the the latest available checkpoint within the namespace.
742
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
743
- will be loaded at the start of the task.
744
- - "none": Do not load any checkpoint
745
- - "fresh": Loads the lastest checkpoint created within the running Task.
746
- This mode helps loading checkpoints across various retry attempts of the same task.
747
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
748
- created within the task will be loaded when the task is retries execution on failure.
668
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
669
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
670
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
671
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked to on
672
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
673
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
749
674
 
750
675
  temp_dir_root : str, default: None
751
- The root directory under which `current.checkpoint.directory` will be created.
752
- """
753
- ...
754
-
755
- @typing.overload
756
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
757
- """
758
- Specifies the PyPI packages for the step.
759
-
760
- Information in this decorator will augment any
761
- attributes set in the `@pyi_base` flow-level decorator. Hence,
762
- you can use `@pypi_base` to set packages required by all
763
- steps and use `@pypi` to specify step-specific overrides.
764
-
765
-
766
- Parameters
767
- ----------
768
- packages : Dict[str, str], default: {}
769
- Packages to use for this step. The key is the name of the package
770
- and the value is the version to use.
771
- python : str, optional, default: None
772
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
773
- that the version used will correspond to the version of the Python interpreter used to start the run.
676
+ The root directory under which `current.model.loaded` will store loaded models
774
677
  """
775
678
  ...
776
679
 
777
680
  @typing.overload
778
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
681
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
779
682
  ...
780
683
 
781
684
  @typing.overload
782
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
685
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
783
686
  ...
784
687
 
785
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
688
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
786
689
  """
787
- Specifies the PyPI packages for the step.
788
-
789
- Information in this decorator will augment any
790
- attributes set in the `@pyi_base` flow-level decorator. Hence,
791
- you can use `@pypi_base` to set packages required by all
792
- steps and use `@pypi` to specify step-specific overrides.
793
-
690
+ Enables loading / saving of models within a step.
794
691
 
795
- Parameters
796
- ----------
797
- packages : Dict[str, str], default: {}
798
- Packages to use for this step. The key is the name of the package
799
- and the value is the version to use.
800
- python : str, optional, default: None
801
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
802
- that the version used will correspond to the version of the Python interpreter used to start the run.
803
- """
804
- ...
805
-
806
- @typing.overload
807
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
808
- """
809
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
810
- It exists to make it easier for users to know that this decorator should only be used with
811
- a Neo Cloud like Nebius.
812
- """
813
- ...
814
-
815
- @typing.overload
816
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
817
- ...
818
-
819
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
820
- """
821
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
822
- It exists to make it easier for users to know that this decorator should only be used with
823
- a Neo Cloud like Nebius.
824
- """
825
- ...
826
-
827
- @typing.overload
828
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
829
- """
830
- Decorator prototype for all step decorators. This function gets specialized
831
- and imported for all decorators types by _import_plugin_decorators().
832
- """
833
- ...
834
-
835
- @typing.overload
836
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
837
- ...
838
-
839
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
840
- """
841
- Decorator prototype for all step decorators. This function gets specialized
842
- and imported for all decorators types by _import_plugin_decorators().
843
- """
844
- ...
845
-
846
- @typing.overload
847
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
848
- """
849
- Specifies environment variables to be set prior to the execution of a step.
692
+ > Examples
693
+ - Saving Models
694
+ ```python
695
+ @model
696
+ @step
697
+ def train(self):
698
+ # current.model.save returns a dictionary reference to the model saved
699
+ self.my_model = current.model.save(
700
+ path_to_my_model,
701
+ label="my_model",
702
+ metadata={
703
+ "epochs": 10,
704
+ "batch-size": 32,
705
+ "learning-rate": 0.001,
706
+ }
707
+ )
708
+ self.next(self.test)
850
709
 
710
+ @model(load="my_model")
711
+ @step
712
+ def test(self):
713
+ # `current.model.loaded` returns a dictionary of the loaded models
714
+ # where the key is the name of the artifact and the value is the path to the model
715
+ print(os.listdir(current.model.loaded["my_model"]))
716
+ self.next(self.end)
717
+ ```
851
718
 
852
- Parameters
853
- ----------
854
- vars : Dict[str, str], default {}
855
- Dictionary of environment variables to set.
856
- """
857
- ...
858
-
859
- @typing.overload
860
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
861
- ...
862
-
863
- @typing.overload
864
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
865
- ...
866
-
867
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
868
- """
869
- Specifies environment variables to be set prior to the execution of a step.
719
+ - Loading models
720
+ ```python
721
+ @step
722
+ def train(self):
723
+ # current.model.load returns the path to the model loaded
724
+ checkpoint_path = current.model.load(
725
+ self.checkpoint_key,
726
+ )
727
+ model_path = current.model.load(
728
+ self.model,
729
+ )
730
+ self.next(self.test)
731
+ ```
870
732
 
871
733
 
872
734
  Parameters
873
735
  ----------
874
- vars : Dict[str, str], default {}
875
- Dictionary of environment variables to set.
876
- """
877
- ...
878
-
879
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
880
- """
881
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
882
-
883
- User code call
884
- --------------
885
- @ollama(
886
- models=[...],
887
- ...
888
- )
889
-
890
- Valid backend options
891
- ---------------------
892
- - 'local': Run as a separate process on the local task machine.
893
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
894
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
895
-
896
- Valid model options
897
- -------------------
898
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
899
-
736
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
737
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
738
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
739
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked to on
740
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
741
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
900
742
 
901
- Parameters
902
- ----------
903
- models: list[str]
904
- List of Ollama containers running models in sidecars.
905
- backend: str
906
- Determines where and how to run the Ollama process.
907
- force_pull: bool
908
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
909
- cache_update_policy: str
910
- Cache update policy: "auto", "force", or "never".
911
- force_cache_update: bool
912
- Simple override for "force" cache update policy.
913
- debug: bool
914
- Whether to turn on verbose debugging logs.
915
- circuit_breaker_config: dict
916
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
917
- timeout_config: dict
918
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
919
- """
920
- ...
921
-
922
- @typing.overload
923
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
924
- """
925
- Internal decorator to support Fast bakery
926
- """
927
- ...
928
-
929
- @typing.overload
930
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
931
- ...
932
-
933
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
934
- """
935
- Internal decorator to support Fast bakery
743
+ temp_dir_root : str, default: None
744
+ The root directory under which `current.model.loaded` will store loaded models
936
745
  """
937
746
  ...
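The tuple form of `load` described above can be sketched as follows; the flow name, label, and paths are illustrative, while `current.model.save` and `current.model.loaded` are the accessors documented in the docstring:

```python
import os
import tempfile

from metaflow import FlowSpec, current, model, step


class ModelLoadDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # Save a small artifact directory and keep the returned reference.
        model_dir = tempfile.mkdtemp()
        with open(os.path.join(model_dir, "weights.txt"), "w") as f:
            f.write("demo")
        self.my_model = current.model.save(model_dir, label="my_model")
        self.next(self.end)

    @model(load=[("my_model", "./loaded_model")])
    @step
    def end(self):
        # The tuple's second element chooses where the artifact is unpacked.
        print(os.listdir(current.model.loaded["my_model"]))


if __name__ == "__main__":
    ModelLoadDemoFlow()
```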
938
747
 
@@ -998,21 +807,74 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
998
807
 
999
808
  Parameters
1000
809
  ----------
1001
- temp_dir_root : str, optional
1002
- The root directory that will hold the temporary directory where objects will be downloaded.
1003
-
1004
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
1005
- The list of repos (models/datasets) to load.
1006
-
1007
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
1008
-
1009
- - If repo (model/dataset) is not found in the datastore:
1010
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
1011
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
1012
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
1013
-
1014
- - If repo is found in the datastore:
1015
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
810
+ temp_dir_root : str, optional
811
+ The root directory that will hold the temporary directory where objects will be downloaded.
812
+
813
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
814
+ The list of repos (models/datasets) to load.
815
+
816
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
817
+
818
+ - If repo (model/dataset) is not found in the datastore:
819
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
820
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
821
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
822
+
823
+ - If repo is found in the datastore:
824
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
825
+ """
826
+ ...
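A usage sketch for the caching behaviour described above, assuming the `loaded` mapping is keyed by repo id; the repo and flow names are placeholders:

```python
from metaflow import FlowSpec, current, huggingface_hub, step


class HFCacheDemoFlow(FlowSpec):

    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # First run: downloaded from the Hub and stored in the datastore.
        # Subsequent runs: loaded straight from the datastore cache.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("repo files at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFCacheDemoFlow()
```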
827
+
828
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
829
+ """
830
+ Specifies that this step should execute on DGX cloud.
831
+
832
+
833
+ Parameters
834
+ ----------
835
+ gpu : int
836
+ Number of GPUs to use.
837
+ gpu_type : str
838
+ Type of Nvidia GPU to use.
839
+ """
840
+ ...
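A minimal sketch of `@nvct` on a step; the GPU type string is a placeholder, substitute one available in your DGX Cloud account:

```python
from metaflow import FlowSpec, nvct, step


class DGXDemoFlow(FlowSpec):

    @nvct(gpu=1, gpu_type="H100")  # "H100" is a placeholder value
    @step
    def start(self):
        # Executes on DGX Cloud with one GPU of the requested type.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DGXDemoFlow()
```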
841
+
842
+ @typing.overload
843
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
844
+ """
845
+ Specifies secrets to be retrieved and injected as environment variables prior to
846
+ the execution of a step.
847
+
848
+
849
+ Parameters
850
+ ----------
851
+ sources : List[Union[str, Dict[str, Any]]], default: []
852
+ List of secret specs, defining how the secrets are to be retrieved
853
+ role : str, optional, default: None
854
+ Role to use for fetching secrets
855
+ """
856
+ ...
857
+
858
+ @typing.overload
859
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
860
+ ...
861
+
862
+ @typing.overload
863
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
864
+ ...
865
+
866
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
867
+ """
868
+ Specifies secrets to be retrieved and injected as environment variables prior to
869
+ the execution of a step.
870
+
871
+
872
+ Parameters
873
+ ----------
874
+ sources : List[Union[str, Dict[str, Any]]], default: []
875
+ List of secret specs, defining how the secrets are to be retrieved
876
+ role : str, optional, default: None
877
+ Role to use for fetching secrets
1016
878
  """
1017
879
  ...
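A sketch of `@secrets` as documented above; the secret name and the environment variable it is assumed to provide are placeholders:

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys from the resolved secret are injected as environment
        # variables before the step body runs.
        print("connecting as", os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```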
1018
880
 
@@ -1105,58 +967,305 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1105
967
  """
1106
968
  ...
1107
969
 
1108
- @typing.overload
1109
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
970
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
971
+ """
972
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
973
+
974
+ User code call
975
+ --------------
976
+ @ollama(
977
+ models=[...],
978
+ ...
979
+ )
980
+
981
+ Valid backend options
982
+ ---------------------
983
+ - 'local': Run as a separate process on the local task machine.
984
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
985
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
986
+
987
+ Valid model options
988
+ -------------------
989
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
990
+
991
+
992
+ Parameters
993
+ ----------
994
+ models: list[str]
995
+ List of Ollama containers running models in sidecars.
996
+ backend: str
997
+ Determines where and how to run the Ollama process.
998
+ force_pull: bool
999
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1000
+ cache_update_policy: str
1001
+ Cache update policy: "auto", "force", or "never".
1002
+ force_cache_update: bool
1003
+ Simple override for "force" cache update policy.
1004
+ debug: bool
1005
+ Whether to turn on verbose debugging logs.
1006
+ circuit_breaker_config: dict
1007
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1008
+ timeout_config: dict
1009
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1010
+ """
1011
+ ...
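A sketch of the sidecar usage described above, assuming the remaining keyword arguments have runtime defaults; the model tag follows the "Valid model options" note:

```python
from metaflow import FlowSpec, ollama, step


class OllamaDemoFlow(FlowSpec):

    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # An Ollama sidecar pulls and serves llama3.2 for the duration of
        # this step; the step body can talk to it over the Ollama API.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```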
1012
+
1013
+ @typing.overload
1014
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1015
+ """
1016
+ Specifies environment variables to be set prior to the execution of a step.
1017
+
1018
+
1019
+ Parameters
1020
+ ----------
1021
+ vars : Dict[str, str], default {}
1022
+ Dictionary of environment variables to set.
1023
+ """
1024
+ ...
1025
+
1026
+ @typing.overload
1027
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1028
+ ...
1029
+
1030
+ @typing.overload
1031
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1032
+ ...
1033
+
1034
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1035
+ """
1036
+ Specifies environment variables to be set prior to the execution of a step.
1037
+
1038
+
1039
+ Parameters
1040
+ ----------
1041
+ vars : Dict[str, str], default {}
1042
+ Dictionary of environment variables to set.
1043
+ """
1044
+ ...
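A minimal sketch of `@environment`; the variable name is arbitrary:

```python
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        # The variable is set before the step body executes.
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```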
1045
+
1046
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1047
+ """
1048
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
1049
+
1050
+ User code call
1051
+ --------------
1052
+ @vllm(
1053
+ model="...",
1054
+ ...
1055
+ )
1056
+
1057
+ Valid backend options
1058
+ ---------------------
1059
+ - 'local': Run as a separate process on the local task machine.
1060
+
1061
+ Valid model options
1062
+ -------------------
1063
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1064
+
1065
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1066
+ If you need multiple models, you must create multiple @vllm decorators.
1067
+
1068
+
1069
+ Parameters
1070
+ ----------
1071
+ model: str
1072
+ HuggingFace model identifier to be served by vLLM.
1073
+ backend: str
1074
+ Determines where and how to run the vLLM process.
1075
+ openai_api_server: bool
1076
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1077
+ Default is False (uses native engine).
1078
+ Set to True for backward compatibility with existing code.
1079
+ debug: bool
1080
+ Whether to turn on verbose debugging logs.
1081
+ card_refresh_interval: int
1082
+ Interval in seconds for refreshing the vLLM status card.
1083
+ Only used when openai_api_server=True.
1084
+ max_retries: int
1085
+ Maximum number of retries checking for vLLM server startup.
1086
+ Only used when openai_api_server=True.
1087
+ retry_alert_frequency: int
1088
+ Frequency of alert logs for vLLM server startup retries.
1089
+ Only used when openai_api_server=True.
1090
+ engine_args : dict
1091
+ Additional keyword arguments to pass to the vLLM engine.
1092
+ For example, `tensor_parallel_size=2`.
1093
+ """
1094
+ ...
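A sketch of `@vllm` using the model identifier quoted in the docstring; the other keyword arguments are assumed to have runtime defaults:

```python
from metaflow import FlowSpec, step, vllm


class VLLMDemoFlow(FlowSpec):

    @vllm(model="meta-llama/Llama-3.2-1B", backend="local")
    @step
    def start(self):
        # One vLLM engine serving a single model runs alongside this step;
        # serving a second model would require a second @vllm decorator.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    VLLMDemoFlow()
```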
1095
+
1096
+ @typing.overload
1097
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1098
+ """
1099
+ Specifies the Conda environment for the step.
1100
+
1101
+ Information in this decorator will augment any
1102
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1103
+ you can use `@conda_base` to set packages required by all
1104
+ steps and use `@conda` to specify step-specific overrides.
1105
+
1106
+
1107
+ Parameters
1108
+ ----------
1109
+ packages : Dict[str, str], default {}
1110
+ Packages to use for this step. The key is the name of the package
1111
+ and the value is the version to use.
1112
+ libraries : Dict[str, str], default {}
1113
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1114
+ python : str, optional, default None
1115
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1116
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1117
+ disabled : bool, default False
1118
+ If set to True, disables @conda.
1119
+ """
1120
+ ...
1121
+
1122
+ @typing.overload
1123
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1124
+ ...
1125
+
1126
+ @typing.overload
1127
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1128
+ ...
1129
+
1130
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1131
+ """
1132
+ Specifies the Conda environment for the step.
1133
+
1134
+ Information in this decorator will augment any
1135
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1136
+ you can use `@conda_base` to set packages required by all
1137
+ steps and use `@conda` to specify step-specific overrides.
1138
+
1139
+
1140
+ Parameters
1141
+ ----------
1142
+ packages : Dict[str, str], default {}
1143
+ Packages to use for this step. The key is the name of the package
1144
+ and the value is the version to use.
1145
+ libraries : Dict[str, str], default {}
1146
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1147
+ python : str, optional, default None
1148
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1149
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1150
+ disabled : bool, default False
1151
+ If set to True, disables @conda.
1152
+ """
1153
+ ...
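A sketch of the flow-level / step-level layering described above; the package and Python versions are illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step


@conda_base(python="3.11.9")
class CondaDemoFlow(FlowSpec):

    @conda(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        # pandas is available only in this step's environment, layered on
        # top of the flow-level @conda_base settings.
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    CondaDemoFlow()
```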
1154
+
1155
+ @typing.overload
1156
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1157
+ """
1158
+ Specifies a timeout for your step.
1159
+
1160
+ This decorator is useful if this step may hang indefinitely.
1161
+
1162
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1163
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1164
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1165
+
1166
+ Note that all the values specified in parameters are added together so if you specify
1167
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1168
+
1169
+
1170
+ Parameters
1171
+ ----------
1172
+ seconds : int, default 0
1173
+ Number of seconds to wait prior to timing out.
1174
+ minutes : int, default 0
1175
+ Number of minutes to wait prior to timing out.
1176
+ hours : int, default 0
1177
+ Number of hours to wait prior to timing out.
1178
+ """
1179
+ ...
1180
+
1181
+ @typing.overload
1182
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1183
+ ...
1184
+
1185
+ @typing.overload
1186
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1187
+ ...
1188
+
1189
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1190
+ """
1191
+ Specifies a timeout for your step.
1192
+
1193
+ This decorator is useful if this step may hang indefinitely.
1194
+
1195
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1196
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1197
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1198
+
1199
+ Note that all the values specified in parameters are added together so if you specify
1200
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1201
+
1202
+
1203
+ Parameters
1204
+ ----------
1205
+ seconds : int, default 0
1206
+ Number of seconds to wait prior to timing out.
1207
+ minutes : int, default 0
1208
+ Number of minutes to wait prior to timing out.
1209
+ hours : int, default 0
1210
+ Number of hours to wait prior to timing out.
1211
+ """
1212
+ ...
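A sketch showing the additive rule above (1 hour plus 30 minutes gives a 90-minute budget) combined with `@retry`, as the docstring suggests:

```python
import time

from metaflow import FlowSpec, retry, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @retry(times=1)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        # If the body exceeds 90 minutes it raises, and @retry gives it
        # one more attempt before the run fails.
        time.sleep(1)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```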
1213
+
1214
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1110
1215
  """
1111
- Specifies the number of times the task corresponding
1112
- to a step needs to be retried.
1113
-
1114
- This decorator is useful for handling transient errors, such as networking issues.
1115
- If your task contains operations that can't be retried safely, e.g. database updates,
1116
- it is advisable to annotate it with `@retry(times=0)`.
1117
-
1118
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1119
- decorator will execute a no-op task after all retries have been exhausted,
1120
- ensuring that the flow execution can continue.
1216
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
1121
1217
 
1122
1218
 
1123
1219
  Parameters
1124
1220
  ----------
1125
- times : int, default 3
1126
- Number of times to retry this task.
1127
- minutes_between_retries : int, default 2
1128
- Number of minutes between retries.
1221
+ integration_name : str, optional
1222
+ Name of the S3 proxy integration. If not specified, will use the only
1223
+ available S3 proxy integration in the namespace (fails if multiple exist).
1224
+ write_mode : str, optional
1225
+ The desired behavior during write operations to the target (origin) S3 bucket.
1226
+ Allowed options are:
1227
+ "origin-and-cache" -> write to both the target S3 bucket and local object
1228
+ storage
1229
+ "origin" -> only write to the target S3 bucket
1230
+ "cache" -> only write to the object storage service used for caching
1231
+ debug : bool, optional
1232
+ Enable debug logging for proxy operations.
1129
1233
  """
1130
1234
  ...
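A sketch of `@s3_proxy` with an explicit integration name (a placeholder) and the dual-write mode documented above:

```python
from metaflow import FlowSpec, s3_proxy, step


class S3ProxyDemoFlow(FlowSpec):

    @s3_proxy(integration_name="my-s3-proxy", write_mode="origin-and-cache")
    @step
    def start(self):
        # S3 traffic from this step is routed through the local proxy;
        # writes land in both the origin bucket and the cache.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3ProxyDemoFlow()
```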
1131
1235
 
1132
1236
  @typing.overload
1133
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1237
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1238
+ """
1239
+ Internal decorator to support Fast bakery
1240
+ """
1134
1241
  ...
1135
1242
 
1136
1243
  @typing.overload
1137
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1244
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1138
1245
  ...
1139
1246
 
1140
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1247
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1141
1248
  """
1142
- Specifies the number of times the task corresponding
1143
- to a step needs to be retried.
1144
-
1145
- This decorator is useful for handling transient errors, such as networking issues.
1146
- If your task contains operations that can't be retried safely, e.g. database updates,
1147
- it is advisable to annotate it with `@retry(times=0)`.
1148
-
1149
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1150
- decorator will execute a no-op task after all retries have been exhausted,
1151
- ensuring that the flow execution can continue.
1152
-
1153
-
1154
- Parameters
1155
- ----------
1156
- times : int, default 3
1157
- Number of times to retry this task.
1158
- minutes_between_retries : int, default 2
1159
- Number of minutes between retries.
1249
+ Internal decorator to support Fast bakery
1250
+ """
1251
+ ...
1252
+
1253
+ @typing.overload
1254
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1255
+ """
1256
+ A simple decorator that demonstrates using CardDecoratorInjector
1257
+ to inject a card and render simple markdown content.
1258
+ """
1259
+ ...
1260
+
1261
+ @typing.overload
1262
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1263
+ ...
1264
+
1265
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1266
+ """
1267
+ A simple decorator that demonstrates using CardDecoratorInjector
1268
+ to inject a card and render simple markdown content.
1160
1269
  """
1161
1270
  ...
1162
1271
 
@@ -1239,115 +1348,6 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1239
1348
  """
1240
1349
  ...
1241
1350
 
1242
- @typing.overload
1243
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1244
- """
1245
- Specifies a timeout for your step.
1246
-
1247
- This decorator is useful if this step may hang indefinitely.
1248
-
1249
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1250
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1251
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1252
-
1253
- Note that all the values specified in parameters are added together so if you specify
1254
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1255
-
1256
-
1257
- Parameters
1258
- ----------
1259
- seconds : int, default 0
1260
- Number of seconds to wait prior to timing out.
1261
- minutes : int, default 0
1262
- Number of minutes to wait prior to timing out.
1263
- hours : int, default 0
1264
- Number of hours to wait prior to timing out.
1265
- """
1266
- ...
1267
-
1268
- @typing.overload
1269
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1270
- ...
1271
-
1272
- @typing.overload
1273
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1274
- ...
1275
-
1276
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1277
- """
1278
- Specifies a timeout for your step.
1279
-
1280
- This decorator is useful if this step may hang indefinitely.
1281
-
1282
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1283
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1284
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1285
-
1286
- Note that all the values specified in parameters are added together so if you specify
1287
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1288
-
1289
-
1290
- Parameters
1291
- ----------
1292
- seconds : int, default 0
1293
- Number of seconds to wait prior to timing out.
1294
- minutes : int, default 0
1295
- Number of minutes to wait prior to timing out.
1296
- hours : int, default 0
1297
- Number of hours to wait prior to timing out.
1298
- """
1299
- ...
1300
-
1301
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
- """
1303
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1304
-
1305
- User code call
1306
- --------------
1307
- @vllm(
1308
- model="...",
1309
- ...
1310
- )
1311
-
1312
- Valid backend options
1313
- ---------------------
1314
- - 'local': Run as a separate process on the local task machine.
1315
-
1316
- Valid model options
1317
- -------------------
1318
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1319
-
1320
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
- If you need multiple models, you must create multiple @vllm decorators.
1322
-
1323
-
1324
- Parameters
1325
- ----------
1326
- model: str
1327
- HuggingFace model identifier to be served by vLLM.
1328
- backend: str
1329
- Determines where and how to run the vLLM process.
1330
- openai_api_server: bool
1331
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
- Default is False (uses native engine).
1333
- Set to True for backward compatibility with existing code.
1334
- debug: bool
1335
- Whether to turn on verbose debugging logs.
1336
- card_refresh_interval: int
1337
- Interval in seconds for refreshing the vLLM status card.
1338
- Only used when openai_api_server=True.
1339
- max_retries: int
1340
- Maximum number of retries checking for vLLM server startup.
1341
- Only used when openai_api_server=True.
1342
- retry_alert_frequency: int
1343
- Frequency of alert logs for vLLM server startup retries.
1344
- Only used when openai_api_server=True.
1345
- engine_args : dict
1346
- Additional keyword arguments to pass to the vLLM engine.
1347
- For example, `tensor_parallel_size=2`.
1348
- """
1349
- ...
1350
-
1351
1351
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1352
1352
  """
1353
1353
  Specifies what flows belong to the same project.
@@ -1365,115 +1365,114 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1365
1365
 
1366
1366
  branch : Optional[str], default None
1367
1367
  The branch to use. If not specified, the branch is set to
1368
- `user.<username>` unless `production` is set to `True`. This can
1369
- also be set on the command line using `--branch` as a top-level option.
1370
- It is an error to specify `branch` in the decorator and on the command line.
1371
-
1372
- production : bool, default False
1373
- Whether or not the branch is the production branch. This can also be set on the
1374
- command line using `--production` as a top-level option. It is an error to specify
1375
- `production` in the decorator and on the command line.
1376
- The project branch name will be:
1377
- - if `branch` is specified:
1378
- - if `production` is True: `prod.<branch>`
1379
- - if `production` is False: `test.<branch>`
1380
- - if `branch` is not specified:
1381
- - if `production` is True: `prod`
1382
- - if `production` is False: `user.<username>`
1383
- """
1384
- ...
1385
-
1386
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1387
- """
1388
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1389
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1390
-
1391
-
1392
- Parameters
1393
- ----------
1394
- timeout : int
1395
- Time, in seconds before the task times out and fails. (Default: 3600)
1396
- poke_interval : int
1397
- Time in seconds that the job should wait in between each try. (Default: 60)
1398
- mode : str
1399
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1400
- exponential_backoff : bool
1401
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1402
- pool : str
1403
- the slot pool this task should run in,
1404
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1405
- soft_fail : bool
1406
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1407
- name : str
1408
- Name of the sensor on Airflow
1409
- description : str
1410
- Description of sensor in the Airflow UI
1411
- external_dag_id : str
1412
- The dag_id that contains the task you want to wait for.
1413
- external_task_ids : List[str]
1414
- The list of task_ids that you want to wait for.
1415
- If None (default value) the sensor waits for the DAG. (Default: None)
1416
- allowed_states : List[str]
1417
- Iterable of allowed states, (Default: ['success'])
1418
- failed_states : List[str]
1419
- Iterable of failed or dis-allowed states. (Default: None)
1420
- execution_delta : datetime.timedelta
1421
- time difference with the previous execution to look at,
1422
- the default is the same logical date as the current task or DAG. (Default: None)
1423
- check_existence: bool
1424
- Set to True to check if the external task exists or check if
1425
- the DAG to wait for exists. (Default: True)
1368
+ `user.<username>` unless `production` is set to `True`. This can
1369
+ also be set on the command line using `--branch` as a top-level option.
1370
+ It is an error to specify `branch` in the decorator and on the command line.
1371
+
1372
+ production : bool, default False
1373
+ Whether or not the branch is the production branch. This can also be set on the
1374
+ command line using `--production` as a top-level option. It is an error to specify
1375
+ `production` in the decorator and on the command line.
1376
+ The project branch name will be:
1377
+ - if `branch` is specified:
1378
+ - if `production` is True: `prod.<branch>`
1379
+ - if `production` is False: `test.<branch>`
1380
+ - if `branch` is not specified:
1381
+ - if `production` is True: `prod`
1382
+ - if `production` is False: `user.<username>`
1426
1383
  """
1427
1384
  ...
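A flow-level sketch of `@project`; with `production` left at its default of False, deployments of this flow land under the `test.my_experiment` branch per the naming rules above (project and branch names are illustrative):

```python
from metaflow import FlowSpec, project, step


@project(name="fraud_detection", branch="my_experiment")
class ProjectDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```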
1428
1385
 
1429
1386
  @typing.overload
1430
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1387
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1431
1388
  """
1432
- Specifies the times when the flow should be run when running on a
1433
- production scheduler.
1389
+ Specifies the event(s) that this flow depends on.
1390
+
1391
+ ```
1392
+ @trigger(event='foo')
1393
+ ```
1394
+ or
1395
+ ```
1396
+ @trigger(events=['foo', 'bar'])
1397
+ ```
1398
+
1399
+ Additionally, you can specify the parameter mappings
1400
+ to map event payload to Metaflow parameters for the flow.
1401
+ ```
1402
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1403
+ ```
1404
+ or
1405
+ ```
1406
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1407
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1408
+ ```
1409
+
1410
+ 'parameters' can also be a list of strings and tuples like so:
1411
+ ```
1412
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1413
+ ```
1414
+ This is equivalent to:
1415
+ ```
1416
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1417
+ ```
1434
1418
 
1435
1419
 
1436
1420
  Parameters
1437
1421
  ----------
1438
- hourly : bool, default False
1439
- Run the workflow hourly.
1440
- daily : bool, default True
1441
- Run the workflow daily.
1442
- weekly : bool, default False
1443
- Run the workflow weekly.
1444
- cron : str, optional, default None
1445
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1446
- specified by this expression.
1447
- timezone : str, optional, default None
1448
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1449
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1422
+ event : Union[str, Dict[str, Any]], optional, default None
1423
+ Event dependency for this flow.
1424
+ events : List[Union[str, Dict[str, Any]]], default []
1425
+ Events dependency for this flow.
1426
+ options : Dict[str, Any], default {}
1427
+ Backend-specific configuration for tuning eventing behavior.
1450
1428
  """
1451
1429
  ...
1452
1430
 
1453
1431
  @typing.overload
1454
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1432
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1455
1433
  ...
1456
1434
 
1457
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1435
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1458
1436
  """
1459
- Specifies the times when the flow should be run when running on a
1460
- production scheduler.
1437
+ Specifies the event(s) that this flow depends on.
1438
+
1439
+ ```
1440
+ @trigger(event='foo')
1441
+ ```
1442
+ or
1443
+ ```
1444
+ @trigger(events=['foo', 'bar'])
1445
+ ```
1446
+
1447
+ Additionally, you can specify the parameter mappings
1448
+ to map event payload to Metaflow parameters for the flow.
1449
+ ```
1450
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1451
+ ```
1452
+ or
1453
+ ```
1454
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1455
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1456
+ ```
1457
+
1458
+ 'parameters' can also be a list of strings and tuples like so:
1459
+ ```
1460
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1461
+ ```
1462
+ This is equivalent to:
1463
+ ```
1464
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1465
+ ```
1461
1466
 
1462
1467
 
1463
1468
  Parameters
1464
1469
  ----------
1465
- hourly : bool, default False
1466
- Run the workflow hourly.
1467
- daily : bool, default True
1468
- Run the workflow daily.
1469
- weekly : bool, default False
1470
- Run the workflow weekly.
1471
- cron : str, optional, default None
1472
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1473
- specified by this expression.
1474
- timezone : str, optional, default None
1475
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1476
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1470
+ event : Union[str, Dict[str, Any]], optional, default None
1471
+ Event dependency for this flow.
1472
+ events : List[Union[str, Dict[str, Any]]], default []
1473
+ Events dependency for this flow.
1474
+ options : Dict[str, Any], default {}
1475
+ Backend-specific configuration for tuning eventing behavior.
1477
1476
  """
1478
1477
  ...
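A sketch of the parameter-mapping form documented above; the event name and payload field are hypothetical:

```python
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={"name": "data_ready", "parameters": {"table": "table_name"}})
class TriggerDemoFlow(FlowSpec):

    # When a deployed run is started by a `data_ready` event, the payload
    # field `table_name` is mapped onto this parameter.
    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("processing", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggerDemoFlow()
```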
1479
1478
 
@@ -1560,21 +1559,164 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1560
1559
  @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1561
1560
  ```
1562
1561
 
1563
- Note that `branch` is typically one of:
1564
- - `prod`
1565
- - `user.bob`
1566
- - `test.my_experiment`
1567
- - `prod.staging`
1562
+ Note that `branch` is typically one of:
1563
+ - `prod`
1564
+ - `user.bob`
1565
+ - `test.my_experiment`
1566
+ - `prod.staging`
1567
+
1568
+
1569
+ Parameters
1570
+ ----------
1571
+ flow : Union[str, Dict[str, str]], optional, default None
1572
+ Upstream flow dependency for this flow.
1573
+ flows : List[Union[str, Dict[str, str]]], default []
1574
+ Upstream flow dependencies for this flow.
1575
+ options : Dict[str, Any], default {}
1576
+ Backend-specific configuration for tuning eventing behavior.
1577
+ """
1578
+ ...
1579
+
1580
+ @typing.overload
1581
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1582
+ """
1583
+ Specifies the times when the flow should be run when running on a
1584
+ production scheduler.
1585
+
1586
+
1587
+ Parameters
1588
+ ----------
1589
+ hourly : bool, default False
1590
+ Run the workflow hourly.
1591
+ daily : bool, default True
1592
+ Run the workflow daily.
1593
+ weekly : bool, default False
1594
+ Run the workflow weekly.
1595
+ cron : str, optional, default None
1596
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1597
+ specified by this expression.
1598
+ timezone : str, optional, default None
1599
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1600
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1601
+ """
1602
+ ...
1603
+
1604
+ @typing.overload
1605
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1606
+ ...
1607
+
1608
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1609
+ """
1610
+ Specifies the times when the flow should be run when running on a
1611
+ production scheduler.
1612
+
1613
+
1614
+ Parameters
1615
+ ----------
1616
+ hourly : bool, default False
1617
+ Run the workflow hourly.
1618
+ daily : bool, default True
1619
+ Run the workflow daily.
1620
+ weekly : bool, default False
1621
+ Run the workflow weekly.
1622
+ cron : str, optional, default None
1623
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1624
+ specified by this expression.
1625
+ timezone : str, optional, default None
1626
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1627
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1628
+ """
1629
+ ...
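A minimal sketch of `@schedule` applied flow-wide, using the documented boolean shortcut rather than a cron expression:

```
from metaflow import FlowSpec, schedule, step

# Run once a day when deployed to a production scheduler
# (e.g. Argo Workflows or AWS Step Functions).
@schedule(daily=True)
class DailyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```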
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
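A minimal sketch of `@pypi_base`; the package pins are examples only:

```
from metaflow import FlowSpec, pypi_base, step

# Flow-wide PyPI environment; activated by running with `--environment=pypi`.
@pypi_base(python='3.11.5', packages={'pandas': '2.2.2'})
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.n = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.n)
```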
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
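A minimal sketch of `@conda_base`; the Python and package versions are illustrative:

```
from metaflow import FlowSpec, conda_base, step

# Flow-wide Conda environment; activated by running with `--environment=conda`.
@conda_base(python='3.11.5', packages={'numpy': '1.26.4'})
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # provided by the @conda_base environment
        self.total = int(np.arange(5).sum())
        self.next(self.end)

    @step
    def end(self):
        print(self.total)
```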

@@ -1692,6 +1834,49 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
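A hedged sketch of how this sensor might be attached to a flow; the DAG, task, and sensor names are placeholders, and arguments omitted here are assumed to fall back to the defaults listed in the docstring above:

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Gate the `start` step on a task in another Airflow DAG. Only takes
# effect when the flow is deployed with `airflow create`.
@airflow_external_task_sensor(
    name='wait_for_upstream',
    external_dag_id='upstream_dag',
    external_task_ids=['publish_table'],
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```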
+
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1735,190 +1920,5 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
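Similarly, a hedged sketch for `@airflow_s3_key_sensor`; the bucket and key names are placeholders, and omitted arguments are assumed to use their documented defaults:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Wait for an object to appear in S3 before the `start` step runs.
# Only takes effect when the flow is deployed with `airflow create`.
@airflow_s3_key_sensor(
    name='wait_for_input_file',
    bucket_name='example-bucket',
    bucket_key='incoming/data.csv',
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```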

- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
-
-
- Parameters
- ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  pkg_name: str