ob-metaflow-stubs 6.0.10.10__py2.py3-none-any.whl → 6.0.10.11__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. metaflow-stubs/__init__.pyi +1161 -1161
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +74 -74
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  116. metaflow-stubs/multicore_utils.pyi +1 -1
  117. metaflow-stubs/ob_internal.pyi +1 -1
  118. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +2 -2
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  123. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  124. metaflow-stubs/parameters.pyi +2 -2
  125. metaflow-stubs/plugins/__init__.pyi +13 -13
  126. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  164. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  165. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  178. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  179. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  186. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  187. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  188. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  207. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  208. metaflow-stubs/plugins/optuna/__init__.pyi +1 -1
  209. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  210. metaflow-stubs/plugins/perimeters.pyi +1 -1
  211. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  215. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  218. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  219. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  220. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  226. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  228. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +1 -1
  235. metaflow-stubs/pylint_wrapper.pyi +1 -1
  236. metaflow-stubs/runner/__init__.pyi +1 -1
  237. metaflow-stubs/runner/deployer.pyi +4 -4
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  241. metaflow-stubs/runner/nbrun.pyi +1 -1
  242. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  243. metaflow-stubs/runner/utils.pyi +2 -2
  244. metaflow-stubs/system/__init__.pyi +1 -1
  245. metaflow-stubs/system/system_logger.pyi +1 -1
  246. metaflow-stubs/system/system_monitor.pyi +1 -1
  247. metaflow-stubs/tagging_util.pyi +1 -1
  248. metaflow-stubs/tuple_util.pyi +1 -1
  249. metaflow-stubs/user_configs/__init__.pyi +1 -1
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  252. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  253. metaflow-stubs/user_decorators/common.pyi +1 -1
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  258. {ob_metaflow_stubs-6.0.10.10.dist-info → ob_metaflow_stubs-6.0.10.11.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.11.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.10.10.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.10.10.dist-info → ob_metaflow_stubs-6.0.10.11.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.10.10.dist-info → ob_metaflow_stubs-6.0.10.11.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.18.7.5+obcheckpoint(0.2.7);ob(v1) #
- # Generated on 2025-09-23T01:34:30.897811 #
+ # Generated on 2025-09-29T21:43:14.782191 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,9 +39,9 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
  from . import cards as cards
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
@@ -169,305 +169,377 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
- """
- ...
-
- @typing.overload
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- A simple decorator that demonstrates using CardDecoratorInjector
- to inject a card and render simple markdown content.
- """
- ...
-
- @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
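The relocated `@retry` stub above documents `times` and `minutes_between_retries`. As a quick illustration of how a step decorated this way is typically written (the flow and step names below are hypothetical and not part of this package):

```python
from metaflow import FlowSpec, retry, step


class RetryDemoFlow(FlowSpec):
    # Hypothetical flow, for illustration only.

    @retry(times=3, minutes_between_retries=2)  # defaults shown in the stub signature
    @step
    def start(self):
        # A transient failure raised here would be retried up to 3 times.
        self.value = 1
        self.next(self.end)

    @step
    def end(self):
        print(self.value)


if __name__ == "__main__":
    RetryDemoFlow()
```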

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
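For reference, a minimal sketch of the `@card` parameters documented above (`type`, `id`, `timeout`); the flow name and the Markdown content are illustrative, and `metaflow.cards.Markdown` is assumed to be available as in stock Metaflow:

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):
    # Hypothetical flow, for illustration only.

    @card(type="default", id="report", timeout=45)
    @step
    def start(self):
        # Components appended to the card identified by `id` are rendered
        # after the step completes.
        current.card["report"].append(Markdown("# Run summary"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```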

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...
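The `@vllm` and `@ollama` stubs above are Outerbounds sidecar decorators. Based only on the "User code call" snippets and parameter lists in their docstrings, a hedged sketch of the call shape follows; the model name is illustrative and the remaining parameters are assumed to carry sensible defaults in the implementation:

```python
from metaflow import FlowSpec, ollama, step


class OllamaDemoFlow(FlowSpec):
    # Hypothetical flow, for illustration only.

    # 'local' is the only backend listed as valid in the docstring above;
    # other parameters (force_pull, debug, ...) are assumed to default sensibly.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # With the sidecar running, the step can talk to the local Ollama API.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```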

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the PyPI packages for the step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
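A minimal sketch of the step-level `@pypi` decorator documented above; the package pin and Python version are illustrative only:

```python
from metaflow import FlowSpec, pypi, step


class PypiDemoFlow(FlowSpec):
    # Hypothetical flow, for illustration only.

    @pypi(packages={"pandas": "2.2.2"}, python="3.11.5")  # illustrative pins
    @step
    def start(self):
        import pandas as pd  # resolved inside the step's isolated environment

        self.rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    PypiDemoFlow()
```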
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[Dict[str,str]], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
+ """
+ ...
+
+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...
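To make the `@kubernetes` parameter list above concrete, a hedged sketch using a few of the documented arguments (`cpu`, `memory`, and the string form of `node_selector`); the resource values below are illustrative:

```python
from metaflow import FlowSpec, kubernetes, step


class K8sDemoFlow(FlowSpec):
    # Hypothetical flow, for illustration only.

    @kubernetes(
        cpu=2,
        memory=8192,  # MB, per the docstring above
        node_selector="kubernetes.io/arch=amd64",  # comma-separated string form
    )
    @step
    def start(self):
        self.message = "ran on Kubernetes"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    K8sDemoFlow()
```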
473
545
 
@@ -618,21 +690,198 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
618
690
  """
619
691
  ...
620
692
 
621
- def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
693
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
622
694
  """
623
- `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
624
- It exists to make it easier for users to know that this decorator should only be used with
625
- a Neo Cloud like Nebius. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
626
-
695
+ Specifies that this step should execute on DGX cloud.
627
696
 
628
- Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
629
- for S3 read and write requests.
630
697
 
631
- This decorator requires an integration in the Outerbounds platform that
632
- points to an external bucket. It affects S3 operations performed via
633
- Metaflow's `get_aws_client` and `S3` within a `@step`.
698
+ Parameters
699
+ ----------
700
+ gpu : int
701
+ Number of GPUs to use.
702
+ gpu_type : str
703
+ Type of Nvidia GPU to use.
704
+ """
705
+ ...
706
+
707
+ @typing.overload
708
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
709
+ """
710
+ Specifies that the step will success under all circumstances.
634
711
 
635
- Read operations
712
+ The decorator will create an optional artifact, specified by `var`, which
713
+ contains the exception raised. You can use it to detect the presence
714
+ of errors, indicating that all happy-path artifacts produced by the step
715
+ are missing.
716
+
717
+
718
+ Parameters
719
+ ----------
720
+ var : str, optional, default None
721
+ Name of the artifact in which to store the caught exception.
722
+ If not specified, the exception is not stored.
723
+ print_exception : bool, default True
724
+ Determines whether or not the exception is printed to
725
+ stdout when caught.
726
+ """
727
+ ...
728
+
729
+ @typing.overload
730
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
731
+ ...
732
+
733
+ @typing.overload
734
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
735
+ ...
736
+
737
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
738
+ """
739
+ Specifies that the step will succeed under all circumstances.
740
+
741
+ The decorator will create an optional artifact, specified by `var`, which
742
+ contains the exception raised. You can use it to detect the presence
743
+ of errors, indicating that all happy-path artifacts produced by the step
744
+ are missing.
745
+
746
+
747
+ Parameters
748
+ ----------
749
+ var : str, optional, default None
750
+ Name of the artifact in which to store the caught exception.
751
+ If not specified, the exception is not stored.
752
+ print_exception : bool, default True
753
+ Determines whether or not the exception is printed to
754
+ stdout when caught.
755
+ """
756
+ ...
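For orientation, here is a minimal sketch (not part of the stub file itself) of the `@catch` behavior documented above: the failure is stored in the artifact named by `var` and inspected downstream. The flow, artifact, and step names are invented for the example.

```python
from metaflow import FlowSpec, step, catch


class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failure")
    @step
    def start(self):
        # If this raises, @catch stores the exception in self.compute_failure
        # and the flow still proceeds to `end`.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failure", None) is not None:
            print("start failed with:", self.compute_failure)
        else:
            print("result:", self.result)


if __name__ == "__main__":
    CatchDemoFlow()
```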
757
+
758
+ @typing.overload
759
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
760
+ """
761
+ Specifies the Conda environment for the step.
762
+
763
+ Information in this decorator will augment any
764
+ attributes set in the `@conda_base` flow-level decorator. Hence,
765
+ you can use `@conda_base` to set packages required by all
766
+ steps and use `@conda` to specify step-specific overrides.
767
+
768
+
769
+ Parameters
770
+ ----------
771
+ packages : Dict[str, str], default {}
772
+ Packages to use for this step. The key is the name of the package
773
+ and the value is the version to use.
774
+ libraries : Dict[str, str], default {}
775
+ Supported for backward compatibility. When used with packages, packages will take precedence.
776
+ python : str, optional, default None
777
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
778
+ that the version used will correspond to the version of the Python interpreter used to start the run.
779
+ disabled : bool, default False
780
+ If set to True, disables @conda.
781
+ """
782
+ ...
783
+
784
+ @typing.overload
785
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
786
+ ...
787
+
788
+ @typing.overload
789
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
790
+ ...
791
+
792
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
793
+ """
794
+ Specifies the Conda environment for the step.
795
+
796
+ Information in this decorator will augment any
797
+ attributes set in the `@conda_base` flow-level decorator. Hence,
798
+ you can use `@conda_base` to set packages required by all
799
+ steps and use `@conda` to specify step-specific overrides.
800
+
801
+
802
+ Parameters
803
+ ----------
804
+ packages : Dict[str, str], default {}
805
+ Packages to use for this step. The key is the name of the package
806
+ and the value is the version to use.
807
+ libraries : Dict[str, str], default {}
808
+ Supported for backward compatibility. When used with packages, packages will take precedence.
809
+ python : str, optional, default None
810
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
811
+ that the version used will correspond to the version of the Python interpreter used to start the run.
812
+ disabled : bool, default False
813
+ If set to True, disables @conda.
814
+ """
815
+ ...
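A minimal sketch of the `@conda_base` / `@conda` layering described above; the pinned package versions are placeholders chosen for illustration, not recommendations.

```python
from metaflow import FlowSpec, step, conda, conda_base


@conda_base(python="3.11.5", packages={"pandas": "2.2.2"})
class CondaDemoFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd   # resolved from the flow-level @conda_base
        print(pd.__version__)
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.5.0"})  # step-specific addition
    @step
    def train(self):
        import sklearn        # available only in this step's environment
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```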
816
+
817
+ @typing.overload
818
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
819
+ """
820
+ Specifies secrets to be retrieved and injected as environment variables prior to
821
+ the execution of a step.
822
+
823
+
824
+ Parameters
825
+ ----------
826
+ sources : List[Union[str, Dict[str, Any]]], default: []
827
+ List of secret specs, defining how the secrets are to be retrieved
828
+ role : str, optional, default: None
829
+ Role to use for fetching secrets
830
+ """
831
+ ...
832
+
833
+ @typing.overload
834
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
835
+ ...
836
+
837
+ @typing.overload
838
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
839
+ ...
840
+
841
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
842
+ """
843
+ Specifies secrets to be retrieved and injected as environment variables prior to
844
+ the execution of a step.
845
+
846
+
847
+ Parameters
848
+ ----------
849
+ sources : List[Union[str, Dict[str, Any]]], default: []
850
+ List of secret specs, defining how the secrets are to be retrieved
851
+ role : str, optional, default: None
852
+ Role to use for fetching secrets
853
+ """
854
+ ...
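For reference, a sketch of how the `sources` argument is typically used. The secret name `db-credentials` and the `DB_USER` environment variable are hypothetical and depend entirely on how the secret is configured in the backend.

```python
import os

from metaflow import FlowSpec, step, secrets


class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # The secret's key/value pairs are injected as environment
        # variables before this step body runs.
        print("DB_USER present:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```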
855
+
856
+ @typing.overload
857
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
858
+ """
859
+ Decorator prototype for all step decorators. This function gets specialized
860
+ and imported for all decorators types by _import_plugin_decorators().
861
+ """
862
+ ...
863
+
864
+ @typing.overload
865
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
866
+ ...
867
+
868
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
869
+ """
870
+ Decorator prototype for all step decorators. This function gets specialized
871
+ and imported for all decorators types by _import_plugin_decorators().
872
+ """
873
+ ...
874
+
875
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
876
+ """
877
+ Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
878
+ for S3 read and write requests.
879
+
880
+ This decorator requires an integration in the Outerbounds platform that
881
+ points to an external bucket. It affects S3 operations performed via
882
+ Metaflow's `get_aws_client` and `S3` within a `@step`.
883
+
884
+ Read operations
636
885
  ---------------
637
886
  All read operations pass through the proxy. If an object does not already
638
887
  exist in the external bucket, it is cached there. For example, if code reads
@@ -682,97 +931,254 @@ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode
682
931
  """
683
932
  ...
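A sketch of `@s3_proxy` in use, assuming an Outerbounds integration named `external-bucket` exists; the bucket path and object key are illustrative. Reads and writes issued through Metaflow's `S3` client inside the step are routed through the proxy as described above, and `origin-and-cache` is one of the documented `write_mode` values.

```python
from metaflow import FlowSpec, S3, step, s3_proxy


class S3ProxyDemoFlow(FlowSpec):

    @s3_proxy(integration_name="external-bucket", write_mode="origin-and-cache")
    @step
    def start(self):
        with S3(s3root="s3://my-bucket/data/") as s3:
            obj = s3.get("input.csv")  # served from / cached in the external bucket
            print("downloaded to", obj.path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3ProxyDemoFlow()
```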
684
933
 
685
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
934
+ @typing.overload
935
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
686
936
  """
687
- Specifies that this step should execute on Kubernetes.
937
+ Specifies the resources needed when executing this step.
938
+
939
+ Use `@resources` to specify the resource requirements
940
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
941
+
942
+ You can choose the compute layer on the command line by executing e.g.
943
+ ```
944
+ python myflow.py run --with batch
945
+ ```
946
+ or
947
+ ```
948
+ python myflow.py run --with kubernetes
949
+ ```
950
+ which executes the flow on the desired system using the
951
+ requirements specified in `@resources`.
688
952
 
689
953
 
690
954
  Parameters
691
955
  ----------
692
956
  cpu : int, default 1
693
- Number of CPUs required for this step. If `@resources` is
694
- also present, the maximum value from all decorators is used.
695
- memory : int, default 4096
696
- Memory size (in MB) required for this step. If
697
- `@resources` is also present, the maximum value from all decorators is
698
- used.
699
- disk : int, default 10240
700
- Disk size (in MB) required for this step. If
701
- `@resources` is also present, the maximum value from all decorators is
702
- used.
703
- image : str, optional, default None
704
- Docker image to use when launching on Kubernetes. If not specified, and
705
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
706
- not, a default Docker image mapping to the current version of Python is used.
707
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
708
- If given, the imagePullPolicy to be applied to the Docker image of the step.
709
- image_pull_secrets: List[str], default []
710
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
711
- Kubernetes image pull secrets to use when pulling container images
712
- in Kubernetes.
713
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
714
- Kubernetes service account to use when launching pod in Kubernetes.
715
- secrets : List[str], optional, default None
716
- Kubernetes secrets to use when launching pod in Kubernetes. These
717
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
718
- in Metaflow configuration.
719
- node_selector: Union[Dict[str,str], str], optional, default None
720
- Kubernetes node selector(s) to apply to the pod running the task.
721
- Can be passed in as a comma separated string of values e.g.
722
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
723
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
724
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
725
- Kubernetes namespace to use when launching pod in Kubernetes.
957
+ Number of CPUs required for this step.
726
958
  gpu : int, optional, default None
727
- Number of GPUs required for this step. A value of zero implies that
728
- the scheduled node should not have GPUs.
729
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
730
- The vendor of the GPUs to be used for this step.
731
- tolerations : List[Dict[str,str]], default []
732
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
733
- Kubernetes tolerations to use when launching pod in Kubernetes.
734
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
735
- Kubernetes labels to use when launching pod in Kubernetes.
736
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
737
- Kubernetes annotations to use when launching pod in Kubernetes.
738
- use_tmpfs : bool, default False
739
- This enables an explicit tmpfs mount for this step.
740
- tmpfs_tempdir : bool, default True
741
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
742
- tmpfs_size : int, optional, default: None
743
- The value for the size (in MiB) of the tmpfs mount for this step.
744
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
745
- memory allocated for this step.
746
- tmpfs_path : str, optional, default /metaflow_temp
747
- Path to tmpfs mount for this step.
748
- persistent_volume_claims : Dict[str, str], optional, default None
749
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
750
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
751
- shared_memory: int, optional
752
- Shared memory size (in MiB) required for this step
753
- port: int, optional
754
- Port number to specify in the Kubernetes job object
755
- compute_pool : str, optional, default None
756
- Compute pool to be used for this step.
757
- If not specified, any accessible compute pool within the perimeter is used.
758
- hostname_resolution_timeout: int, default 10 * 60
759
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
760
- Only applicable when @parallel is used.
761
- qos: str, default: Burstable
762
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
959
+ Number of GPUs required for this step.
960
+ disk : int, optional, default None
961
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
962
+ memory : int, default 4096
963
+ Memory size (in MB) required for this step.
964
+ shared_memory : int, optional, default None
965
+ The value for the size (in MiB) of the /dev/shm volume for this step.
966
+ This parameter maps to the `--shm-size` option in Docker.
967
+ """
968
+ ...
969
+
970
+ @typing.overload
971
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
972
+ ...
973
+
974
+ @typing.overload
975
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
976
+ ...
977
+
978
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
979
+ """
980
+ Specifies the resources needed when executing this step.
763
981
 
764
- security_context: Dict[str, Any], optional, default None
765
- Container security context. Applies to the task container. Allows the following keys:
766
- - privileged: bool, optional, default None
767
- - allow_privilege_escalation: bool, optional, default None
768
- - run_as_user: int, optional, default None
769
- - run_as_group: int, optional, default None
770
- - run_as_non_root: bool, optional, default None
982
+ Use `@resources` to specify the resource requirements
983
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
984
+
985
+ You can choose the compute layer on the command line by executing e.g.
986
+ ```
987
+ python myflow.py run --with batch
988
+ ```
989
+ or
990
+ ```
991
+ python myflow.py run --with kubernetes
992
+ ```
993
+ which executes the flow on the desired system using the
994
+ requirements specified in `@resources`.
995
+
996
+
997
+ Parameters
998
+ ----------
999
+ cpu : int, default 1
1000
+ Number of CPUs required for this step.
1001
+ gpu : int, optional, default None
1002
+ Number of GPUs required for this step.
1003
+ disk : int, optional, default None
1004
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1005
+ memory : int, default 4096
1006
+ Memory size (in MB) required for this step.
1007
+ shared_memory : int, optional, default None
1008
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1009
+ This parameter maps to the `--shm-size` option in Docker.
771
1010
  """
772
1011
  ...
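A minimal sketch of the pattern described above: the step declares its requirements with `@resources` and the compute layer is chosen at run time. The numbers and flow name are illustrative.

```python
from metaflow import FlowSpec, step, resources


class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16000, gpu=1)
    @step
    def start(self):
        # Launch with e.g. `python resources_demo.py run --with kubernetes`
        # to have these requirements honored by the chosen compute layer.
        print("requested 4 CPUs, 16 GB memory, 1 GPU")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesDemoFlow()
```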
773
1012
 
774
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1013
+ @typing.overload
1014
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1015
+ """
1016
+ Enables loading / saving of models within a step.
1017
+
1018
+ > Examples
1019
+ - Saving Models
1020
+ ```python
1021
+ @model
1022
+ @step
1023
+ def train(self):
1024
+ # current.model.save returns a dictionary reference to the model saved
1025
+ self.my_model = current.model.save(
1026
+ path_to_my_model,
1027
+ label="my_model",
1028
+ metadata={
1029
+ "epochs": 10,
1030
+ "batch-size": 32,
1031
+ "learning-rate": 0.001,
1032
+ }
1033
+ )
1034
+ self.next(self.test)
1035
+
1036
+ @model(load="my_model")
1037
+ @step
1038
+ def test(self):
1039
+ # `current.model.loaded` returns a dictionary of the loaded models
1040
+ # where the key is the name of the artifact and the value is the path to the model
1041
+ print(os.listdir(current.model.loaded["my_model"]))
1042
+ self.next(self.end)
1043
+ ```
1044
+
1045
+ - Loading models
1046
+ ```python
1047
+ @step
1048
+ def train(self):
1049
+ # current.model.load returns the path to the model loaded
1050
+ checkpoint_path = current.model.load(
1051
+ self.checkpoint_key,
1052
+ )
1053
+ model_path = current.model.load(
1054
+ self.model,
1055
+ )
1056
+ self.next(self.test)
1057
+ ```
1058
+
1059
+
1060
+ Parameters
1061
+ ----------
1062
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1063
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1064
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1065
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1066
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1067
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1068
+
1069
+ temp_dir_root : str, default: None
1070
+ The root directory under which `current.model.loaded` will store loaded models
1071
+ """
1072
+ ...
1073
+
1074
+ @typing.overload
1075
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1076
+ ...
1077
+
1078
+ @typing.overload
1079
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1080
+ ...
1081
+
1082
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1083
+ """
1084
+ Enables loading / saving of models within a step.
1085
+
1086
+ > Examples
1087
+ - Saving Models
1088
+ ```python
1089
+ @model
1090
+ @step
1091
+ def train(self):
1092
+ # current.model.save returns a dictionary reference to the model saved
1093
+ self.my_model = current.model.save(
1094
+ path_to_my_model,
1095
+ label="my_model",
1096
+ metadata={
1097
+ "epochs": 10,
1098
+ "batch-size": 32,
1099
+ "learning-rate": 0.001,
1100
+ }
1101
+ )
1102
+ self.next(self.test)
1103
+
1104
+ @model(load="my_model")
1105
+ @step
1106
+ def test(self):
1107
+ # `current.model.loaded` returns a dictionary of the loaded models
1108
+ # where the key is the name of the artifact and the value is the path to the model
1109
+ print(os.listdir(current.model.loaded["my_model"]))
1110
+ self.next(self.end)
1111
+ ```
1112
+
1113
+ - Loading models
1114
+ ```python
1115
+ @step
1116
+ def train(self):
1117
+ # current.model.load returns the path to the model loaded
1118
+ checkpoint_path = current.model.load(
1119
+ self.checkpoint_key,
1120
+ )
1121
+ model_path = current.model.load(
1122
+ self.model,
1123
+ )
1124
+ self.next(self.test)
1125
+ ```
1126
+
1127
+
1128
+ Parameters
1129
+ ----------
1130
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1131
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1132
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1133
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1134
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1135
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1136
+
1137
+ temp_dir_root : str, default: None
1138
+ The root directory under which `current.model.loaded` will store loaded models
1139
+ """
1140
+ ...
1141
+
1142
+ @typing.overload
1143
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1144
+ """
1145
+ Specifies environment variables to be set prior to the execution of a step.
1146
+
1147
+
1148
+ Parameters
1149
+ ----------
1150
+ vars : Dict[str, str], default {}
1151
+ Dictionary of environment variables to set.
1152
+ """
1153
+ ...
1154
+
1155
+ @typing.overload
1156
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1157
+ ...
1158
+
1159
+ @typing.overload
1160
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1161
+ ...
1162
+
1163
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1164
+ """
1165
+ Specifies environment variables to be set prior to the execution of a step.
1166
+
1167
+
1168
+ Parameters
1169
+ ----------
1170
+ vars : Dict[str, str], default {}
1171
+ Dictionary of environment variables to set.
1172
+ """
1173
+ ...
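A small sketch of `@environment`; the variable name is arbitrary and used only for illustration.

```python
import os

from metaflow import FlowSpec, step, environment


class EnvDemoFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        # The variable is set before the step body executes.
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```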
1174
+
1175
+ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
775
1176
  """
1177
+ `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1178
+ It exists to make it easier for users to know that this decorator should only be used with
1179
+ a Neo Cloud like CoreWeave. The underlying mechanics of the decorator are the same as `@s3_proxy`:
1180
+
1181
+
776
1182
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
777
1183
  for S3 read and write requests.
778
1184
 
@@ -830,27 +1236,86 @@ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typin
830
1236
  """
831
1237
  ...
832
1238
 
833
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1239
+ @typing.overload
1240
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
834
1241
  """
835
- Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1242
+ Specifies a timeout for your step.
836
1243
 
837
- Examples
838
- --------
1244
+ This decorator is useful if this step may hang indefinitely.
839
1245
 
840
- ```python
841
- # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
842
- @huggingface_hub
843
- @step
844
- def pull_model_from_huggingface(self):
845
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
846
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
847
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
848
- # value of the function is a reference to the model in the backend storage.
849
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1246
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1247
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1248
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
850
1249
 
851
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
852
- self.llama_model = current.huggingface_hub.snapshot_download(
853
- repo_id=self.model_id,
1250
+ Note that all the values specified in parameters are added together so if you specify
1251
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1252
+
1253
+
1254
+ Parameters
1255
+ ----------
1256
+ seconds : int, default 0
1257
+ Number of seconds to wait prior to timing out.
1258
+ minutes : int, default 0
1259
+ Number of minutes to wait prior to timing out.
1260
+ hours : int, default 0
1261
+ Number of hours to wait prior to timing out.
1262
+ """
1263
+ ...
1264
+
1265
+ @typing.overload
1266
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1267
+ ...
1268
+
1269
+ @typing.overload
1270
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
+ ...
1272
+
1273
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1274
+ """
1275
+ Specifies a timeout for your step.
1276
+
1277
+ This decorator is useful if this step may hang indefinitely.
1278
+
1279
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1280
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1281
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1282
+
1283
+ Note that all the values specified in parameters are added together so if you specify
1284
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1285
+
1286
+
1287
+ Parameters
1288
+ ----------
1289
+ seconds : int, default 0
1290
+ Number of seconds to wait prior to timing out.
1291
+ minutes : int, default 0
1292
+ Number of minutes to wait prior to timing out.
1293
+ hours : int, default 0
1294
+ Number of hours to wait prior to timing out.
1295
+ """
1296
+ ...
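To make the additive-duration rule above concrete, here is a sketch where the effective limit is 1 minute 30 seconds and the timeout exception is absorbed by `@catch`; the flow and artifact names are invented.

```python
import time

from metaflow import FlowSpec, step, timeout, catch


class TimeoutDemoFlow(FlowSpec):

    @catch(var="timed_out")
    @timeout(minutes=1, seconds=30)   # limits add up: 90 seconds total
    @step
    def start(self):
        time.sleep(5)   # stand-in for work that might hang past the limit
        self.next(self.end)

    @step
    def end(self):
        print("timed out:", getattr(self, "timed_out", None) is not None)


if __name__ == "__main__":
    TimeoutDemoFlow()
```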
1297
+
1298
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1299
+ """
1300
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
1301
+
1302
+ Examples
1303
+ --------
1304
+
1305
+ ```python
1306
+ # **Usage: creating references to models from the Hugging Face Hub that may be loaded in downstream steps**
1307
+ @huggingface_hub
1308
+ @step
1309
+ def pull_model_from_huggingface(self):
1310
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
1311
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
1312
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
1313
+ # value of the function is a reference to the model in the backend storage.
1314
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
1315
+
1316
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
1317
+ self.llama_model = current.huggingface_hub.snapshot_download(
1318
+ repo_id=self.model_id,
854
1319
  allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
855
1320
  )
856
1321
  self.next(self.train)
@@ -947,6 +1412,22 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
947
1412
  """
948
1413
  ...
949
1414
 
1415
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1416
+ """
1417
+ Specifies that this step should execute on DGX cloud.
1418
+
1419
+
1420
+ Parameters
1421
+ ----------
1422
+ gpu : int
1423
+ Number of GPUs to use.
1424
+ gpu_type : str
1425
+ Type of Nvidia GPU to use.
1426
+ queue_timeout : int
1427
+ Time to keep the job in NVCF's queue.
1428
+ """
1429
+ ...
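A hedged sketch of `@nvidia` usage on DGX Cloud; the `gpu_type` string and `queue_timeout` value are assumptions for illustration and must match what your deployment actually offers.

```python
import subprocess

from metaflow import FlowSpec, step, nvidia


class DgxDemoFlow(FlowSpec):

    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)  # values are illustrative
    @step
    def start(self):
        # Inspect the GPU allocated for this task.
        print(subprocess.run(["nvidia-smi", "-L"], capture_output=True, text=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DgxDemoFlow()
```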
1430
+
950
1431
  @typing.overload
951
1432
  def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
952
1433
  """
@@ -964,11 +1445,11 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
964
1445
  """
965
1446
  ...
966
1447
 
967
- def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1448
+ def nebius_s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
968
1449
  """
969
- `@coreweave_s3_proxy` is a CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1450
+ `@nebius_s3_proxy` is a Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
970
1451
  It exists to make it easier for users to know that this decorator should only be used with
971
- a Neo Cloud like CoreWeave. The underlying mechanics of the decorator is the same as the `@s3_proxy`:
1452
+ a Neo Cloud like Nebius. The underlying mechanics of the decorator are the same as `@s3_proxy`:
972
1453
 
973
1454
 
974
1455
  Set up an S3 proxy that caches objects in an external, S3‑compatible bucket
@@ -1023,853 +1504,135 @@ def coreweave_s3_proxy(*, integration_name: typing.Optional[str] = None, write_m
1023
1504
  Controls whether writes also go to the external bucket.
1024
1505
  - `origin` (default)
1025
1506
  - `origin-and-cache`
1026
- debug : bool, optional
1027
- Enables debug logging for proxy operations.
1028
- """
1029
- ...
1030
-
1031
- @typing.overload
1032
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1033
- """
1034
- Decorator prototype for all step decorators. This function gets specialized
1035
- and imported for all decorators types by _import_plugin_decorators().
1036
- """
1037
- ...
1038
-
1039
- @typing.overload
1040
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1041
- ...
1042
-
1043
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1044
- """
1045
- Decorator prototype for all step decorators. This function gets specialized
1046
- and imported for all decorators types by _import_plugin_decorators().
1047
- """
1048
- ...
1049
-
1050
- @typing.overload
1051
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1052
- """
1053
- Specifies the resources needed when executing this step.
1054
-
1055
- Use `@resources` to specify the resource requirements
1056
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1057
-
1058
- You can choose the compute layer on the command line by executing e.g.
1059
- ```
1060
- python myflow.py run --with batch
1061
- ```
1062
- or
1063
- ```
1064
- python myflow.py run --with kubernetes
1065
- ```
1066
- which executes the flow on the desired system using the
1067
- requirements specified in `@resources`.
1068
-
1069
-
1070
- Parameters
1071
- ----------
1072
- cpu : int, default 1
1073
- Number of CPUs required for this step.
1074
- gpu : int, optional, default None
1075
- Number of GPUs required for this step.
1076
- disk : int, optional, default None
1077
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1078
- memory : int, default 4096
1079
- Memory size (in MB) required for this step.
1080
- shared_memory : int, optional, default None
1081
- The value for the size (in MiB) of the /dev/shm volume for this step.
1082
- This parameter maps to the `--shm-size` option in Docker.
1083
- """
1084
- ...
1085
-
1086
- @typing.overload
1087
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1088
- ...
1089
-
1090
- @typing.overload
1091
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1092
- ...
1093
-
1094
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1095
- """
1096
- Specifies the resources needed when executing this step.
1097
-
1098
- Use `@resources` to specify the resource requirements
1099
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1100
-
1101
- You can choose the compute layer on the command line by executing e.g.
1102
- ```
1103
- python myflow.py run --with batch
1104
- ```
1105
- or
1106
- ```
1107
- python myflow.py run --with kubernetes
1108
- ```
1109
- which executes the flow on the desired system using the
1110
- requirements specified in `@resources`.
1111
-
1112
-
1113
- Parameters
1114
- ----------
1115
- cpu : int, default 1
1116
- Number of CPUs required for this step.
1117
- gpu : int, optional, default None
1118
- Number of GPUs required for this step.
1119
- disk : int, optional, default None
1120
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1121
- memory : int, default 4096
1122
- Memory size (in MB) required for this step.
1123
- shared_memory : int, optional, default None
1124
- The value for the size (in MiB) of the /dev/shm volume for this step.
1125
- This parameter maps to the `--shm-size` option in Docker.
1126
- """
1127
- ...
1128
-
1129
- @typing.overload
1130
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1131
- """
1132
- Specifies the PyPI packages for the step.
1133
-
1134
- Information in this decorator will augment any
1135
- attributes set in the `@pypi_base` flow-level decorator. Hence,
1136
- you can use `@pypi_base` to set packages required by all
1137
- steps and use `@pypi` to specify step-specific overrides.
1138
-
1139
-
1140
- Parameters
1141
- ----------
1142
- packages : Dict[str, str], default: {}
1143
- Packages to use for this step. The key is the name of the package
1144
- and the value is the version to use.
1145
- python : str, optional, default: None
1146
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1147
- that the version used will correspond to the version of the Python interpreter used to start the run.
1148
- """
1149
- ...
1150
-
1151
- @typing.overload
1152
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1153
- ...
1154
-
1155
- @typing.overload
1156
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1157
- ...
1158
-
1159
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1160
- """
1161
- Specifies the PyPI packages for the step.
1162
-
1163
- Information in this decorator will augment any
1164
- attributes set in the `@pypi_base` flow-level decorator. Hence,
1165
- you can use `@pypi_base` to set packages required by all
1166
- steps and use `@pypi` to specify step-specific overrides.
1167
-
1168
-
1169
- Parameters
1170
- ----------
1171
- packages : Dict[str, str], default: {}
1172
- Packages to use for this step. The key is the name of the package
1173
- and the value is the version to use.
1174
- python : str, optional, default: None
1175
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1176
- that the version used will correspond to the version of the Python interpreter used to start the run.
1177
- """
1178
- ...
1179
-
1180
- @typing.overload
1181
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1182
- """
1183
- Specifies a timeout for your step.
1184
-
1185
- This decorator is useful if this step may hang indefinitely.
1186
-
1187
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1188
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1189
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1190
-
1191
- Note that all the values specified in parameters are added together so if you specify
1192
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1193
-
1194
-
1195
- Parameters
1196
- ----------
1197
- seconds : int, default 0
1198
- Number of seconds to wait prior to timing out.
1199
- minutes : int, default 0
1200
- Number of minutes to wait prior to timing out.
1201
- hours : int, default 0
1202
- Number of hours to wait prior to timing out.
1203
- """
1204
- ...
1205
-
1206
- @typing.overload
1207
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1208
- ...
1209
-
1210
- @typing.overload
1211
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1212
- ...
1213
-
1214
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1215
- """
1216
- Specifies a timeout for your step.
1217
-
1218
- This decorator is useful if this step may hang indefinitely.
1219
-
1220
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1221
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1222
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1223
-
1224
- Note that all the values specified in parameters are added together so if you specify
1225
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1226
-
1227
-
1228
- Parameters
1229
- ----------
1230
- seconds : int, default 0
1231
- Number of seconds to wait prior to timing out.
1232
- minutes : int, default 0
1233
- Number of minutes to wait prior to timing out.
1234
- hours : int, default 0
1235
- Number of hours to wait prior to timing out.
1236
- """
1237
- ...
1238
-
1239
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1240
- """
1241
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1242
-
1243
- User code call
1244
- --------------
1245
- @ollama(
1246
- models=[...],
1247
- ...
1248
- )
1249
-
1250
- Valid backend options
1251
- ---------------------
1252
- - 'local': Run as a separate process on the local task machine.
1253
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1254
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1255
-
1256
- Valid model options
1257
- -------------------
1258
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1259
-
1260
-
1261
- Parameters
1262
- ----------
1263
- models: list[str]
1264
- List of Ollama containers running models in sidecars.
1265
- backend: str
1266
- Determines where and how to run the Ollama process.
1267
- force_pull: bool
1268
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1269
- cache_update_policy: str
1270
- Cache update policy: "auto", "force", or "never".
1271
- force_cache_update: bool
1272
- Simple override for "force" cache update policy.
1273
- debug: bool
1274
- Whether to turn on verbose debugging logs.
1275
- circuit_breaker_config: dict
1276
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1277
- timeout_config: dict
1278
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1279
- """
1280
- ...
1281
-
1282
- @typing.overload
1283
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1284
- """
1285
- Specifies secrets to be retrieved and injected as environment variables prior to
1286
- the execution of a step.
1287
-
1288
-
1289
- Parameters
1290
- ----------
1291
- sources : List[Union[str, Dict[str, Any]]], default: []
1292
- List of secret specs, defining how the secrets are to be retrieved
1293
- role : str, optional, default: None
1294
- Role to use for fetching secrets
1295
- """
1296
- ...
1297
-
1298
- @typing.overload
1299
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1300
- ...
1301
-
1302
- @typing.overload
1303
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1304
- ...
1305
-
1306
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1307
- """
1308
- Specifies secrets to be retrieved and injected as environment variables prior to
1309
- the execution of a step.
1310
-
1311
-
1312
- Parameters
1313
- ----------
1314
- sources : List[Union[str, Dict[str, Any]]], default: []
1315
- List of secret specs, defining how the secrets are to be retrieved
1316
- role : str, optional, default: None
1317
- Role to use for fetching secrets
1318
- """
1319
- ...
1320
-
1321
- @typing.overload
1322
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1323
- """
1324
- Creates a human-readable report, a Metaflow Card, after this step completes.
1325
-
1326
- Note that you may add multiple `@card` decorators in a step with different parameters.
1327
-
1328
-
1329
- Parameters
1330
- ----------
1331
- type : str, default 'default'
1332
- Card type.
1333
- id : str, optional, default None
1334
- If multiple cards are present, use this id to identify this card.
1335
- options : Dict[str, Any], default {}
1336
- Options passed to the card. The contents depend on the card type.
1337
- timeout : int, default 45
1338
- Interrupt reporting if it takes more than this many seconds.
1339
- """
1340
- ...
1341
-
1342
- @typing.overload
1343
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1344
- ...
1345
-
1346
- @typing.overload
1347
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1348
- ...
1349
-
1350
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1351
- """
1352
- Creates a human-readable report, a Metaflow Card, after this step completes.
1353
-
1354
- Note that you may add multiple `@card` decorators in a step with different parameters.
1355
-
1356
-
1357
- Parameters
1358
- ----------
1359
- type : str, default 'default'
1360
- Card type.
1361
- id : str, optional, default None
1362
- If multiple cards are present, use this id to identify this card.
1363
- options : Dict[str, Any], default {}
1364
- Options passed to the card. The contents depend on the card type.
1365
- timeout : int, default 45
1366
- Interrupt reporting if it takes more than this many seconds.
1367
- """
1368
- ...
1369
-
1370
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1371
- """
1372
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1373
-
1374
- User code call
1375
- --------------
1376
- @vllm(
1377
- model="...",
1378
- ...
1379
- )
1380
-
1381
- Valid backend options
1382
- ---------------------
1383
- - 'local': Run as a separate process on the local task machine.
1384
-
1385
- Valid model options
1386
- -------------------
1387
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1388
-
1389
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1390
- If you need multiple models, you must create multiple @vllm decorators.
1391
-
1392
-
1393
- Parameters
1394
- ----------
1395
- model: str
1396
- HuggingFace model identifier to be served by vLLM.
1397
- backend: str
1398
- Determines where and how to run the vLLM process.
1399
- openai_api_server: bool
1400
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1401
- Default is False (uses native engine).
1402
- Set to True for backward compatibility with existing code.
1403
- debug: bool
1404
- Whether to turn on verbose debugging logs.
1405
- card_refresh_interval: int
1406
- Interval in seconds for refreshing the vLLM status card.
1407
- Only used when openai_api_server=True.
1408
- max_retries: int
1409
- Maximum number of retries checking for vLLM server startup.
1410
- Only used when openai_api_server=True.
1411
- retry_alert_frequency: int
1412
- Frequency of alert logs for vLLM server startup retries.
1413
- Only used when openai_api_server=True.
1414
- engine_args : dict
1415
- Additional keyword arguments to pass to the vLLM engine.
1416
- For example, `tensor_parallel_size=2`.
1417
- """
1418
- ...
1419
-
1420
- @typing.overload
1421
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1422
- """
1423
- Specifies the Conda environment for the step.
1424
-
1425
- Information in this decorator will augment any
1426
- attributes set in the `@conda_base` flow-level decorator. Hence,
1427
- you can use `@conda_base` to set packages required by all
1428
- steps and use `@conda` to specify step-specific overrides.
1429
-
1430
-
1431
- Parameters
1432
- ----------
1433
- packages : Dict[str, str], default {}
1434
- Packages to use for this step. The key is the name of the package
1435
- and the value is the version to use.
1436
- libraries : Dict[str, str], default {}
1437
- Supported for backward compatibility. When used with packages, packages will take precedence.
1438
- python : str, optional, default None
1439
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1440
- that the version used will correspond to the version of the Python interpreter used to start the run.
1441
- disabled : bool, default False
1442
- If set to True, disables @conda.
1443
- """
1444
- ...
1445
-
1446
- @typing.overload
1447
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1448
- ...
1449
-
1450
- @typing.overload
1451
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1452
- ...
1453
-
1454
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1455
- """
1456
- Specifies the Conda environment for the step.
1457
-
1458
- Information in this decorator will augment any
1459
- attributes set in the `@conda_base` flow-level decorator. Hence,
1460
- you can use `@conda_base` to set packages required by all
1461
- steps and use `@conda` to specify step-specific overrides.
1462
-
1463
-
1464
- Parameters
1465
- ----------
1466
- packages : Dict[str, str], default {}
1467
- Packages to use for this step. The key is the name of the package
1468
- and the value is the version to use.
1469
- libraries : Dict[str, str], default {}
1470
- Supported for backward compatibility. When used with packages, packages will take precedence.
1471
- python : str, optional, default None
1472
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1473
- that the version used will correspond to the version of the Python interpreter used to start the run.
1474
- disabled : bool, default False
1475
- If set to True, disables @conda.
1476
- """
1477
- ...
1478
-
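For orientation, a minimal sketch of how the step-level `@conda` decorator documented above is typically applied; the package name, version pins, and flow name are illustrative placeholders, not values taken from this diff:

```
from metaflow import FlowSpec, conda, step

class CondaStepFlow(FlowSpec):

    # Step-specific Conda environment; augments anything set via @conda_base.
    @conda(packages={'pandas': '2.1.4'}, python='3.10.4')
    @step
    def start(self):
        import pandas as pd  # resolvable only inside this step's environment
        self.n_rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.n_rows)

if __name__ == '__main__':
    CondaStepFlow()
```

The flow is typically run with `--environment=conda` so the declared packages are resolved before the step executes.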
1479
- @typing.overload
1480
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1481
- """
1482
- Decorator prototype for all step decorators. This function gets specialized
1483
- and imported for all decorator types by _import_plugin_decorators().
1484
- """
1485
- ...
1486
-
1487
- @typing.overload
1488
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1489
- ...
1490
-
1491
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1492
- """
1493
- Decorator prototype for all step decorators. This function gets specialized
1494
- and imported for all decorator types by _import_plugin_decorators().
1495
- """
1496
- ...
1497
-
1498
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1499
- """
1500
- Specifies that this step should execute on DGX cloud.
1501
-
1502
-
1503
- Parameters
1504
- ----------
1505
- gpu : int
1506
- Number of GPUs to use.
1507
- gpu_type : str
1508
- Type of Nvidia GPU to use.
1509
- """
1510
- ...
1511
-
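A sketch of attaching the `@nvct` decorator above to a step; the `gpu_type` string is a placeholder and must match an accelerator actually offered by your DGX Cloud setup:

```
from metaflow import FlowSpec, nvct, step

class NvctFlow(FlowSpec):

    @nvct(gpu=1, gpu_type='H100')  # placeholder GPU type; both arguments are required
    @step
    def start(self):
        # Executes on DGX Cloud with the requested accelerator attached.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NvctFlow()
```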
1512
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1513
- """
1514
- Specifies what flows belong to the same project.
1515
-
1516
- A project-specific namespace is created for all flows that
1517
- use the same `@project(name)`.
1518
-
1519
-
1520
- Parameters
1521
- ----------
1522
- name : str
1523
- Project name. Make sure that the name is unique amongst all
1524
- projects that use the same production scheduler. The name may
1525
- contain only lowercase alphanumeric characters and underscores.
1526
-
1527
- branch : Optional[str], default None
1528
- The branch to use. If not specified, the branch is set to
1529
- `user.<username>` unless `production` is set to `True`. This can
1530
- also be set on the command line using `--branch` as a top-level option.
1531
- It is an error to specify `branch` in the decorator and on the command line.
1532
-
1533
- production : bool, default False
1534
- Whether or not the branch is the production branch. This can also be set on the
1535
- command line using `--production` as a top-level option. It is an error to specify
1536
- `production` in the decorator and on the command line.
1537
- The project branch name will be:
1538
- - if `branch` is specified:
1539
- - if `production` is True: `prod.<branch>`
1540
- - if `production` is False: `test.<branch>`
1541
- - if `branch` is not specified:
1542
- - if `production` is True: `prod`
1543
- - if `production` is False: `user.<username>`
1544
- """
1545
- ...
1546
-
1547
- @typing.overload
1548
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
- """
1550
- Specifies the Conda environment for all steps of the flow.
1551
-
1552
- Use `@conda_base` to set common libraries required by all
1553
- steps and use `@conda` to specify step-specific additions.
1554
-
1555
-
1556
- Parameters
1557
- ----------
1558
- packages : Dict[str, str], default {}
1559
- Packages to use for this flow. The key is the name of the package
1560
- and the value is the version to use.
1561
- libraries : Dict[str, str], default {}
1562
- Supported for backward compatibility. When used with packages, packages will take precedence.
1563
- python : str, optional, default None
1564
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1565
- that the version used will correspond to the version of the Python interpreter used to start the run.
1566
- disabled : bool, default False
1567
- If set to True, disables Conda.
1568
- """
1569
- ...
1570
-
1571
- @typing.overload
1572
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1573
- ...
1574
-
1575
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1576
- """
1577
- Specifies the Conda environment for all steps of the flow.
1578
-
1579
- Use `@conda_base` to set common libraries required by all
1580
- steps and use `@conda` to specify step-specific additions.
1581
-
1582
-
1583
- Parameters
1584
- ----------
1585
- packages : Dict[str, str], default {}
1586
- Packages to use for this flow. The key is the name of the package
1587
- and the value is the version to use.
1588
- libraries : Dict[str, str], default {}
1589
- Supported for backward compatibility. When used with packages, packages will take precedence.
1590
- python : str, optional, default None
1591
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1592
- that the version used will correspond to the version of the Python interpreter used to start the run.
1593
- disabled : bool, default False
1594
- If set to True, disables Conda.
1595
- """
1596
- ...
1597
-
1598
- @typing.overload
1599
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1600
- """
1601
- Specifies the PyPI packages for all steps of the flow.
1602
-
1603
- Use `@pypi_base` to set common packages required by all
1604
- steps and use `@pypi` to specify step-specific overrides.
1605
-
1606
- Parameters
1607
- ----------
1608
- packages : Dict[str, str], default: {}
1609
- Packages to use for this flow. The key is the name of the package
1610
- and the value is the version to use.
1611
- python : str, optional, default: None
1612
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1613
- that the version used will correspond to the version of the Python interpreter used to start the run.
1614
- """
1615
- ...
1616
-
1617
- @typing.overload
1618
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1619
- ...
1620
-
1621
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1622
- """
1623
- Specifies the PyPI packages for all steps of the flow.
1624
-
1625
- Use `@pypi_base` to set common packages required by all
1626
- steps and use `@pypi` to specify step-specific overrides.
1627
-
1628
- Parameters
1629
- ----------
1630
- packages : Dict[str, str], default: {}
1631
- Packages to use for this flow. The key is the name of the package
1632
- and the value is the version to use.
1633
- python : str, optional, default: None
1634
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1635
- that the version used will correspond to the version of the Python interpreter used to start the run.
1636
- """
1637
- ...
1638
-
1639
- @typing.overload
1640
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1641
- """
1642
- Specifies the event(s) that this flow depends on.
1643
-
1644
- ```
1645
- @trigger(event='foo')
1646
- ```
1647
- or
1648
- ```
1649
- @trigger(events=['foo', 'bar'])
1650
- ```
1651
-
1652
- Additionally, you can specify the parameter mappings
1653
- to map event payload to Metaflow parameters for the flow.
1654
- ```
1655
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1656
- ```
1657
- or
1658
- ```
1659
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1660
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1661
- ```
1662
-
1663
- 'parameters' can also be a list of strings and tuples like so:
1664
- ```
1665
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1666
- ```
1667
- This is equivalent to:
1668
- ```
1669
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1670
- ```
1671
-
1672
-
1673
- Parameters
1674
- ----------
1675
- event : Union[str, Dict[str, Any]], optional, default None
1676
- Event dependency for this flow.
1677
- events : List[Union[str, Dict[str, Any]]], default []
1678
- Events dependency for this flow.
1679
- options : Dict[str, Any], default {}
1680
- Backend-specific configuration for tuning eventing behavior.
1681
- """
1682
- ...
1683
-
1684
- @typing.overload
1685
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1686
- ...
1687
-
1688
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1689
- """
1690
- Specifies the event(s) that this flow depends on.
1691
-
1692
- ```
1693
- @trigger(event='foo')
1694
- ```
1695
- or
1696
- ```
1697
- @trigger(events=['foo', 'bar'])
1698
- ```
1699
-
1700
- Additionally, you can specify the parameter mappings
1701
- to map event payload to Metaflow parameters for the flow.
1702
- ```
1703
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1704
- ```
1705
- or
1706
- ```
1707
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1708
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1709
- ```
1710
-
1711
- 'parameters' can also be a list of strings and tuples like so:
1712
- ```
1713
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1714
- ```
1715
- This is equivalent to:
1716
- ```
1717
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1718
- ```
1507
+ debug : bool, optional
1508
+ Enables debug logging for proxy operations.
1509
+ """
1510
+ ...
1511
+
1512
+ @typing.overload
1513
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1514
+ """
1515
+ Specifies the times when the flow should be run when running on a
1516
+ production scheduler.
1719
1517
 
1720
1518
 
1721
1519
  Parameters
1722
1520
  ----------
1723
- event : Union[str, Dict[str, Any]], optional, default None
1724
- Event dependency for this flow.
1725
- events : List[Union[str, Dict[str, Any]]], default []
1726
- Events dependency for this flow.
1727
- options : Dict[str, Any], default {}
1728
- Backend-specific configuration for tuning eventing behavior.
1521
+ hourly : bool, default False
1522
+ Run the workflow hourly.
1523
+ daily : bool, default True
1524
+ Run the workflow daily.
1525
+ weekly : bool, default False
1526
+ Run the workflow weekly.
1527
+ cron : str, optional, default None
1528
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1529
+ specified by this expression.
1530
+ timezone : str, optional, default None
1531
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1532
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1729
1533
  """
1730
1534
  ...
1731
1535
 
1732
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1536
+ @typing.overload
1537
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1538
+ ...
1539
+
1540
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1733
1541
  """
1734
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1735
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1542
+ Specifies the times when the flow should be run when running on a
1543
+ production scheduler.
1736
1544
 
1737
1545
 
1738
1546
  Parameters
1739
1547
  ----------
1740
- timeout : int
1741
- Time, in seconds before the task times out and fails. (Default: 3600)
1742
- poke_interval : int
1743
- Time in seconds that the job should wait in between each try. (Default: 60)
1744
- mode : str
1745
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1746
- exponential_backoff : bool
1747
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1748
- pool : str
1749
- The slot pool this task should run in;
1750
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
1751
- soft_fail : bool
1752
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1753
- name : str
1754
- Name of the sensor on Airflow
1755
- description : str
1756
- Description of sensor in the Airflow UI
1757
- external_dag_id : str
1758
- The dag_id that contains the task you want to wait for.
1759
- external_task_ids : List[str]
1760
- The list of task_ids that you want to wait for.
1761
- If None (default value) the sensor waits for the DAG. (Default: None)
1762
- allowed_states : List[str]
1763
- Iterable of allowed states. (Default: ['success'])
1764
- failed_states : List[str]
1765
- Iterable of failed or disallowed states. (Default: None)
1766
- execution_delta : datetime.timedelta
1767
- Time difference with the previous execution to look at;
1768
- the default is the same logical date as the current task or DAG. (Default: None)
1769
- check_existence : bool
1770
- Set to True to check if the external task exists or check if
1771
- the DAG to wait for exists. (Default: True)
1548
+ hourly : bool, default False
1549
+ Run the workflow hourly.
1550
+ daily : bool, default True
1551
+ Run the workflow daily.
1552
+ weekly : bool, default False
1553
+ Run the workflow weekly.
1554
+ cron : str, optional, default None
1555
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1556
+ specified by this expression.
1557
+ timezone : str, optional, default None
1558
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1559
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1772
1560
  """
1773
1561
  ...
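As a concrete illustration of the `@schedule` decorator described above (the cron expression is arbitrary); the schedule only takes effect once the flow is deployed to a production scheduler:

```
from metaflow import FlowSpec, schedule, step

@schedule(cron='0 6 * * *')  # every day at 06:00 on the production scheduler
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyReportFlow()
```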
1774
1562
 
1775
1563
  @typing.overload
1776
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1564
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1777
1565
  """
1778
- Specifies the flow(s) that this flow depends on.
1779
-
1780
- ```
1781
- @trigger_on_finish(flow='FooFlow')
1782
- ```
1783
- or
1784
- ```
1785
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1786
- ```
1787
- This decorator respects the @project decorator and triggers the flow
1788
- when upstream runs within the same namespace complete successfully.
1789
-
1790
- Additionally, you can specify project aware upstream flow dependencies
1791
- by specifying the fully qualified project_flow_name.
1792
- ```
1793
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1794
- ```
1795
- or
1796
- ```
1797
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1798
- ```
1799
-
1800
- You can also specify just the project or project branch (other values will be
1801
- inferred from the current project or project branch):
1802
- ```
1803
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1804
- ```
1805
-
1806
- Note that `branch` is typically one of:
1807
- - `prod`
1808
- - `user.bob`
1809
- - `test.my_experiment`
1810
- - `prod.staging`
1566
+ Specifies the PyPI packages for all steps of the flow.
1811
1567
 
1568
+ Use `@pypi_base` to set common packages required by all
1569
+ steps and use `@pypi` to specify step-specific overrides.
1812
1570
 
1813
1571
  Parameters
1814
1572
  ----------
1815
- flow : Union[str, Dict[str, str]], optional, default None
1816
- Upstream flow dependency for this flow.
1817
- flows : List[Union[str, Dict[str, str]]], default []
1818
- Upstream flow dependencies for this flow.
1819
- options : Dict[str, Any], default {}
1820
- Backend-specific configuration for tuning eventing behavior.
1573
+ packages : Dict[str, str], default: {}
1574
+ Packages to use for this flow. The key is the name of the package
1575
+ and the value is the version to use.
1576
+ python : str, optional, default: None
1577
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1578
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1821
1579
  """
1822
1580
  ...
1823
1581
 
1824
1582
  @typing.overload
1825
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1583
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1826
1584
  ...
1827
1585
 
1828
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1586
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1829
1587
  """
1830
- Specifies the flow(s) that this flow depends on.
1831
-
1832
- ```
1833
- @trigger_on_finish(flow='FooFlow')
1834
- ```
1835
- or
1836
- ```
1837
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1838
- ```
1839
- This decorator respects the @project decorator and triggers the flow
1840
- when upstream runs within the same namespace complete successfully.
1588
+ Specifies the PyPI packages for all steps of the flow.
1841
1589
 
1842
- Additionally, you can specify project aware upstream flow dependencies
1843
- by specifying the fully qualified project_flow_name.
1844
- ```
1845
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1846
- ```
1847
- or
1848
- ```
1849
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1850
- ```
1590
+ Use `@pypi_base` to set common packages required by all
1591
+ steps and use `@pypi` to specify step-specific overrides.
1851
1592
 
1852
- You can also specify just the project or project branch (other values will be
1853
- inferred from the current project or project branch):
1854
- ```
1855
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1856
- ```
1593
+ Parameters
1594
+ ----------
1595
+ packages : Dict[str, str], default: {}
1596
+ Packages to use for this flow. The key is the name of the package
1597
+ and the value is the version to use.
1598
+ python : str, optional, default: None
1599
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1600
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1601
+ """
1602
+ ...
1603
+
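A minimal sketch of the `@pypi_base` flow decorator documented above; the package pins and Python version are illustrative:

```
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={'requests': '2.31.0'}, python='3.11.5')  # illustrative pins
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level PyPI environment
        self.ok_code = requests.codes.ok
        self.next(self.end)

    @step
    def end(self):
        print('HTTP OK is', self.ok_code)

if __name__ == '__main__':
    PypiBaseFlow()
```

Run with `--environment=pypi`, every step resolves the same pinned packages unless a step-level `@pypi` overrides them.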
1604
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1605
+ """
1606
+ Specifies what flows belong to the same project.
1857
1607
 
1858
- Note that `branch` is typically one of:
1859
- - `prod`
1860
- - `user.bob`
1861
- - `test.my_experiment`
1862
- - `prod.staging`
1608
+ A project-specific namespace is created for all flows that
1609
+ use the same `@project(name)`.
1863
1610
 
1864
1611
 
1865
1612
  Parameters
1866
1613
  ----------
1867
- flow : Union[str, Dict[str, str]], optional, default None
1868
- Upstream flow dependency for this flow.
1869
- flows : List[Union[str, Dict[str, str]]], default []
1870
- Upstream flow dependencies for this flow.
1871
- options : Dict[str, Any], default {}
1872
- Backend-specific configuration for tuning eventing behavior.
1614
+ name : str
1615
+ Project name. Make sure that the name is unique amongst all
1616
+ projects that use the same production scheduler. The name may
1617
+ contain only lowercase alphanumeric characters and underscores.
1618
+
1619
+ branch : Optional[str], default None
1620
+ The branch to use. If not specified, the branch is set to
1621
+ `user.<username>` unless `production` is set to `True`. This can
1622
+ also be set on the command line using `--branch` as a top-level option.
1623
+ It is an error to specify `branch` in the decorator and on the command line.
1624
+
1625
+ production : bool, default False
1626
+ Whether or not the branch is the production branch. This can also be set on the
1627
+ command line using `--production` as a top-level option. It is an error to specify
1628
+ `production` in the decorator and on the command line.
1629
+ The project branch name will be:
1630
+ - if `branch` is specified:
1631
+ - if `production` is True: `prod.<branch>`
1632
+ - if `production` is False: `test.<branch>`
1633
+ - if `branch` is not specified:
1634
+ - if `production` is True: `prod`
1635
+ - if `production` is False: `user.<username>`
1873
1636
  """
1874
1637
  ...
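A sketch of the `@project` decorator described above; the project name is a placeholder that follows the lowercase-and-underscores rule:

```
from metaflow import FlowSpec, project, step

@project(name='demo_project')  # placeholder; lowercase alphanumerics and underscores only
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ProjectScopedFlow()
```

Deployed without extra flags, the flow lands in the `user.<username>` branch; passing `--branch` or `--production` at deploy time switches the namespace according to the branch rules listed above.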
1875
1638
 
@@ -1987,6 +1750,57 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1987
1750
  """
1988
1751
  ...
1989
1752
 
1753
+ @typing.overload
1754
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1755
+ """
1756
+ Specifies the Conda environment for all steps of the flow.
1757
+
1758
+ Use `@conda_base` to set common libraries required by all
1759
+ steps and use `@conda` to specify step-specific additions.
1760
+
1761
+
1762
+ Parameters
1763
+ ----------
1764
+ packages : Dict[str, str], default {}
1765
+ Packages to use for this flow. The key is the name of the package
1766
+ and the value is the version to use.
1767
+ libraries : Dict[str, str], default {}
1768
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1769
+ python : str, optional, default None
1770
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1771
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1772
+ disabled : bool, default False
1773
+ If set to True, disables Conda.
1774
+ """
1775
+ ...
1776
+
1777
+ @typing.overload
1778
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1779
+ ...
1780
+
1781
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1782
+ """
1783
+ Specifies the Conda environment for all steps of the flow.
1784
+
1785
+ Use `@conda_base` to set common libraries required by all
1786
+ steps and use `@conda` to specify step-specific additions.
1787
+
1788
+
1789
+ Parameters
1790
+ ----------
1791
+ packages : Dict[str, str], default {}
1792
+ Packages to use for this flow. The key is the name of the package
1793
+ and the value is the version to use.
1794
+ libraries : Dict[str, str], default {}
1795
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1796
+ python : str, optional, default None
1797
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1798
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1799
+ disabled : bool, default False
1800
+ If set to True, disables Conda.
1801
+ """
1802
+ ...
1803
+
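To make the interplay between the flow-level and step-level decorators concrete, here is a sketch combining `@conda_base` with a step-level `@conda` override; all package names and versions are placeholders:

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(packages={'numpy': '1.26.4'}, python='3.10.4')  # shared by every step
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np
        self.total = int(np.arange(4).sum())
        self.next(self.train)

    @conda(packages={'scikit-learn': '1.4.2'})  # step-specific addition on top of the base
    @step
    def train(self):
        import sklearn  # available only in this step's augmented environment
        self.sklearn_version = sklearn.__version__
        self.next(self.end)

    @step
    def end(self):
        print(self.total, self.sklearn_version)

if __name__ == '__main__':
    CondaBaseFlow()
```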
1990
1804
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1991
1805
  """
1992
1806
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -2030,54 +1844,240 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
2030
1844
  """
2031
1845
  ...
2032
1846
 
1847
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1848
+ """
1849
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1850
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1851
+
1852
+
1853
+ Parameters
1854
+ ----------
1855
+ timeout : int
1856
+ Time, in seconds before the task times out and fails. (Default: 3600)
1857
+ poke_interval : int
1858
+ Time in seconds that the job should wait in between each try. (Default: 60)
1859
+ mode : str
1860
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1861
+ exponential_backoff : bool
1862
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1863
+ pool : str
1864
+ The slot pool this task should run in;
1865
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1866
+ soft_fail : bool
1867
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1868
+ name : str
1869
+ Name of the sensor on Airflow
1870
+ description : str
1871
+ Description of sensor in the Airflow UI
1872
+ external_dag_id : str
1873
+ The dag_id that contains the task you want to wait for.
1874
+ external_task_ids : List[str]
1875
+ The list of task_ids that you want to wait for.
1876
+ If None (default value) the sensor waits for the DAG. (Default: None)
1877
+ allowed_states : List[str]
1878
+ Iterable of allowed states. (Default: ['success'])
1879
+ failed_states : List[str]
1880
+ Iterable of failed or disallowed states. (Default: None)
1881
+ execution_delta : datetime.timedelta
1882
+ Time difference with the previous execution to look at;
1883
+ the default is the same logical date as the current task or DAG. (Default: None)
1884
+ check_existence : bool
1885
+ Set to True to check if the external task exists or check if
1886
+ the DAG to wait for exists. (Default: True)
1887
+ """
1888
+ ...
1889
+
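A sketch of gating a flow on an upstream Airflow DAG with the sensor documented above; the DAG and task ids are placeholders, and the argument values simply echo the documented defaults since the stub lists every keyword explicitly:

```
import datetime
from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode='reschedule',
    exponential_backoff=True,
    pool='default_pool',
    soft_fail=False,
    name='wait_for_upstream_etl',
    description='Hold start until the upstream DAG task succeeds',
    external_dag_id='upstream_etl_dag',   # placeholder DAG id
    external_task_ids=['load_table'],     # placeholder task id
    allowed_states=['success'],
    failed_states=['failed'],
    execution_delta=datetime.timedelta(hours=1),
    check_existence=True,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SensorGatedFlow()
```

As the docstring notes, the sensor is only materialized when the flow is compiled with `airflow create`.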
2033
1890
  @typing.overload
2034
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1891
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
2035
1892
  """
2036
- Specifies the times when the flow should be run when running on a
2037
- production scheduler.
1893
+ Specifies the flow(s) that this flow depends on.
1894
+
1895
+ ```
1896
+ @trigger_on_finish(flow='FooFlow')
1897
+ ```
1898
+ or
1899
+ ```
1900
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1901
+ ```
1902
+ This decorator respects the @project decorator and triggers the flow
1903
+ when upstream runs within the same namespace complete successfully.
1904
+
1905
+ Additionally, you can specify project aware upstream flow dependencies
1906
+ by specifying the fully qualified project_flow_name.
1907
+ ```
1908
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1909
+ ```
1910
+ or
1911
+ ```
1912
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1913
+ ```
1914
+
1915
+ You can also specify just the project or project branch (other values will be
1916
+ inferred from the current project or project branch):
1917
+ ```
1918
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1919
+ ```
1920
+
1921
+ Note that `branch` is typically one of:
1922
+ - `prod`
1923
+ - `user.bob`
1924
+ - `test.my_experiment`
1925
+ - `prod.staging`
2038
1926
 
2039
1927
 
2040
1928
  Parameters
2041
1929
  ----------
2042
- hourly : bool, default False
2043
- Run the workflow hourly.
2044
- daily : bool, default True
2045
- Run the workflow daily.
2046
- weekly : bool, default False
2047
- Run the workflow weekly.
2048
- cron : str, optional, default None
2049
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
2050
- specified by this expression.
2051
- timezone : str, optional, default None
2052
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
2053
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1930
+ flow : Union[str, Dict[str, str]], optional, default None
1931
+ Upstream flow dependency for this flow.
1932
+ flows : List[Union[str, Dict[str, str]]], default []
1933
+ Upstream flow dependencies for this flow.
1934
+ options : Dict[str, Any], default {}
1935
+ Backend-specific configuration for tuning eventing behavior.
2054
1936
  """
2055
1937
  ...
2056
1938
 
2057
1939
  @typing.overload
2058
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1940
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2059
1941
  ...
2060
1942
 
2061
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1943
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
2062
1944
  """
2063
- Specifies the times when the flow should be run when running on a
2064
- production scheduler.
1945
+ Specifies the flow(s) that this flow depends on.
1946
+
1947
+ ```
1948
+ @trigger_on_finish(flow='FooFlow')
1949
+ ```
1950
+ or
1951
+ ```
1952
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1953
+ ```
1954
+ This decorator respects the @project decorator and triggers the flow
1955
+ when upstream runs within the same namespace complete successfully.
1956
+
1957
+ Additionally, you can specify project aware upstream flow dependencies
1958
+ by specifying the fully qualified project_flow_name.
1959
+ ```
1960
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1961
+ ```
1962
+ or
1963
+ ```
1964
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1965
+ ```
1966
+
1967
+ You can also specify just the project or project branch (other values will be
1968
+ inferred from the current project or project branch):
1969
+ ```
1970
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1971
+ ```
1972
+
1973
+ Note that `branch` is typically one of:
1974
+ - `prod`
1975
+ - `user.bob`
1976
+ - `test.my_experiment`
1977
+ - `prod.staging`
2065
1978
 
2066
1979
 
2067
1980
  Parameters
2068
1981
  ----------
2069
- hourly : bool, default False
2070
- Run the workflow hourly.
2071
- daily : bool, default True
2072
- Run the workflow daily.
2073
- weekly : bool, default False
2074
- Run the workflow weekly.
2075
- cron : str, optional, default None
2076
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
2077
- specified by this expression.
2078
- timezone : str, optional, default None
2079
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
2080
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1982
+ flow : Union[str, Dict[str, str]], optional, default None
1983
+ Upstream flow dependency for this flow.
1984
+ flows : List[Union[str, Dict[str, str]]], default []
1985
+ Upstream flow dependencies for this flow.
1986
+ options : Dict[str, Any], default {}
1987
+ Backend-specific configuration for tuning eventing behavior.
1988
+ """
1989
+ ...
1990
+
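A minimal sketch of the `@trigger_on_finish` decorator above; `UpstreamFlow` is a placeholder name, and the trigger only fires for deployments on a production orchestrator:

```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow='UpstreamFlow')  # placeholder upstream flow name
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Runs automatically after a successful UpstreamFlow run in the same namespace.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```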
1991
+ @typing.overload
1992
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1993
+ """
1994
+ Specifies the event(s) that this flow depends on.
1995
+
1996
+ ```
1997
+ @trigger(event='foo')
1998
+ ```
1999
+ or
2000
+ ```
2001
+ @trigger(events=['foo', 'bar'])
2002
+ ```
2003
+
2004
+ Additionally, you can specify the parameter mappings
2005
+ to map event payload to Metaflow parameters for the flow.
2006
+ ```
2007
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2008
+ ```
2009
+ or
2010
+ ```
2011
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
2012
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
2013
+ ```
2014
+
2015
+ 'parameters' can also be a list of strings and tuples like so:
2016
+ ```
2017
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
2018
+ ```
2019
+ This is equivalent to:
2020
+ ```
2021
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2022
+ ```
2023
+
2024
+
2025
+ Parameters
2026
+ ----------
2027
+ event : Union[str, Dict[str, Any]], optional, default None
2028
+ Event dependency for this flow.
2029
+ events : List[Union[str, Dict[str, Any]]], default []
2030
+ Events dependency for this flow.
2031
+ options : Dict[str, Any], default {}
2032
+ Backend-specific configuration for tuning eventing behavior.
2033
+ """
2034
+ ...
2035
+
2036
+ @typing.overload
2037
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
2038
+ ...
2039
+
2040
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
2041
+ """
2042
+ Specifies the event(s) that this flow depends on.
2043
+
2044
+ ```
2045
+ @trigger(event='foo')
2046
+ ```
2047
+ or
2048
+ ```
2049
+ @trigger(events=['foo', 'bar'])
2050
+ ```
2051
+
2052
+ Additionally, you can specify the parameter mappings
2053
+ to map event payload to Metaflow parameters for the flow.
2054
+ ```
2055
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
2056
+ ```
2057
+ or
2058
+ ```
2059
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
2060
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
2061
+ ```
2062
+
2063
+ 'parameters' can also be a list of strings and tuples like so:
2064
+ ```
2065
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
2066
+ ```
2067
+ This is equivalent to:
2068
+ ```
2069
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
2070
+ ```
2071
+
2072
+
2073
+ Parameters
2074
+ ----------
2075
+ event : Union[str, Dict[str, Any]], optional, default None
2076
+ Event dependency for this flow.
2077
+ events : List[Union[str, Dict[str, Any]]], default []
2078
+ Events dependency for this flow.
2079
+ options : Dict[str, Any], default {}
2080
+ Backend-specific configuration for tuning eventing behavior.
2081
2081
  """
2082
2082
  ...
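Finally, a sketch of the event-based `@trigger` decorator above, wiring one event-payload field into a flow parameter; the event name and payload field are placeholders:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Map the event payload field 'table_name' onto the flow parameter 'table'.
@trigger(event={'name': 'data_refreshed',
                'parameters': {'table': 'table_name'}})
class EventDrivenFlow(FlowSpec):

    table = Parameter('table', default='unset')

    @step
    def start(self):
        print('triggered for table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```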
2083
2083