ob-metaflow-stubs 6.0.9.3__py2.py3-none-any.whl → 6.0.10.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-stubs might be problematic.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +721 -721
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +42 -42
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +5 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +4 -4
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +6 -5
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +6 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +4 -4
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +3 -3
  125. metaflow-stubs/plugins/__init__.pyi +9 -9
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +6 -6
  165. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +3 -3
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  258. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.10.0.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.0.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.3.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.10.0.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.10.0.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.1.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-09-02T19:19:25.341768 #
+ # MF version: 2.18.2.1+obcheckpoint(0.2.4);ob(v1) #
+ # Generated on 2025-09-08T21:00:14.553698 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import cards as cards
  from . import metaflow_git as metaflow_git
  from . import events as events
- from . import cards as cards
  from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -168,178 +168,192 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Internal decorator to support Fast bakery
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Internal decorator to support Fast bakery
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

@@ -398,7 +412,7 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.

@@ -409,156 +423,126 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
  Number of GPUs to use.
  gpu_type : str
  Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
  """
  ...

- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ S3 Proxy decorator for routing S3 requests through a local proxy service.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ integration_name : str, optional
+ Name of the S3 proxy integration. If not specified, will use the only
+ available S3 proxy integration in the namespace (fails if multiple exist).
+ write_mode : str, optional
+ The desired behavior during write operations to target (origin) S3 bucket.
+ allowed options are:
+ "origin-and-cache" -> write to both the target S3 bucket and local object
+ storage
+ "origin" -> only write to the target S3 bucket
+ "cache" -> only write to the object storage service used for caching
+ debug : bool, optional
+ Enable debug logging for proxy operations.
  """
  ...

- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- S3 Proxy decorator for routing S3 requests through a local proxy service.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- integration_name : str, optional
- Name of the S3 proxy integration. If not specified, will use the only
- available S3 proxy integration in the namespace (fails if multiple exist).
- write_mode : str, optional
- The desired behavior during write operations to target (origin) S3 bucket.
- allowed options are:
- "origin-and-cache" -> write to both the target S3 bucket and local object
- storage
- "origin" -> only write to the target S3 bucket
- "cache" -> only write to the object storage service used for caching
- debug : bool, optional
- Enable debug logging for proxy operations.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

@@ -710,123 +694,70 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
710
694
  ...
711
695
 
712
696
  @typing.overload
713
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
697
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
714
698
  """
715
- Specifies a timeout for your step.
716
-
717
- This decorator is useful if this step may hang indefinitely.
718
-
719
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
720
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
721
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
722
-
723
- Note that all the values specified in parameters are added together so if you specify
724
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
725
-
726
-
727
- Parameters
728
- ----------
729
- seconds : int, default 0
730
- Number of seconds to wait prior to timing out.
731
- minutes : int, default 0
732
- Number of minutes to wait prior to timing out.
733
- hours : int, default 0
734
- Number of hours to wait prior to timing out.
699
+ Internal decorator to support Fast bakery
735
700
  """
736
701
  ...
737
702
 
738
703
  @typing.overload
739
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
740
- ...
741
-
742
- @typing.overload
743
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
704
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
744
705
  ...
745
706
 
746
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
707
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
747
708
  """
748
- Specifies a timeout for your step.
749
-
750
- This decorator is useful if this step may hang indefinitely.
751
-
752
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
753
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
754
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
755
-
756
- Note that all the values specified in parameters are added together so if you specify
757
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
758
-
759
-
760
- Parameters
761
- ----------
762
- seconds : int, default 0
763
- Number of seconds to wait prior to timing out.
764
- minutes : int, default 0
765
- Number of minutes to wait prior to timing out.
766
- hours : int, default 0
767
- Number of hours to wait prior to timing out.
709
+ Internal decorator to support Fast bakery
768
710
  """
769
711
  ...
770
712
 
771
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
713
+ @typing.overload
714
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
715
  """
773
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
774
-
775
- User code call
776
- --------------
777
- @ollama(
778
- models=[...],
779
- ...
780
- )
781
-
782
- Valid backend options
783
- ---------------------
784
- - 'local': Run as a separate process on the local task machine.
785
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
786
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
716
+ Specifies the PyPI packages for the step.
787
717
 
788
- Valid model options
789
- -------------------
790
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
718
+ Information in this decorator will augment any
719
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
720
+ you can use `@pypi_base` to set packages required by all
721
+ steps and use `@pypi` to specify step-specific overrides.
791
722
 
792
723
 
793
724
  Parameters
794
725
  ----------
795
- models: list[str]
796
- List of Ollama containers running models in sidecars.
797
- backend: str
798
- Determines where and how to run the Ollama process.
799
- force_pull: bool
800
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
801
- cache_update_policy: str
802
- Cache update policy: "auto", "force", or "never".
803
- force_cache_update: bool
804
- Simple override for "force" cache update policy.
805
- debug: bool
806
- Whether to turn on verbose debugging logs.
807
- circuit_breaker_config: dict
808
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
809
- timeout_config: dict
810
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
726
+ packages : Dict[str, str], default: {}
727
+ Packages to use for this step. The key is the name of the package
728
+ and the value is the version to use.
729
+ python : str, optional, default: None
730
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
731
+ that the version used will correspond to the version of the Python interpreter used to start the run.
811
732
  """
812
733
  ...
813
734
 
814
735
  @typing.overload
815
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
816
- """
817
- Decorator prototype for all step decorators. This function gets specialized
818
- and imported for all decorators types by _import_plugin_decorators().
819
- """
736
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
820
737
  ...
821
738
 
822
739
  @typing.overload
823
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
740
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
824
741
  ...
825
742
 
826
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
743
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
827
744
  """
828
- Decorator prototype for all step decorators. This function gets specialized
829
- and imported for all decorators types by _import_plugin_decorators().
745
+ Specifies the PyPI packages for the step.
746
+
747
+ Information in this decorator will augment any
748
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
749
+ you can use `@pypi_base` to set packages required by all
750
+ steps and use `@pypi` to specify step-specific overrides.
751
+
752
+
753
+ Parameters
754
+ ----------
755
+ packages : Dict[str, str], default: {}
756
+ Packages to use for this step. The key is the name of the package
757
+ and the value is the version to use.
758
+ python : str, optional, default: None
759
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
760
+ that the version used will correspond to the version of the Python interpreter used to start the run.
830
761
  """
831
762
  ...
832
763
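For quick reference, here is a minimal sketch of how the step-level `@pypi` decorator documented above composes with the flow-level `@pypi_base`; the flow name and package pins are illustrative only, not part of the stub.

```python
from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(packages={"requests": "2.31.0"}, python="3.11.9")  # shared by all steps (illustrative pins)
class PypiExampleFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd  # imported inside the step's isolated environment
        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.n_rows)

if __name__ == "__main__":
    PypiExampleFlow()
```

Typically the packages only take effect when the flow is run with the PyPI environment enabled, e.g. `python pypi_example.py --environment=pypi run`.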
 
@@ -919,7 +850,49 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
919
850
  """
920
851
  ...
921
852
 
922
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
853
+ @typing.overload
854
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
855
+ """
856
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
857
+ It exists to make it easier for users to know that this decorator should only be used with
858
+ a Neo Cloud like Nebius.
859
+ """
860
+ ...
861
+
862
+ @typing.overload
863
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
864
+ ...
865
+
866
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
867
+ """
868
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
869
+ It exists to make it easier for users to know that this decorator should only be used with
870
+ a Neo Cloud like Nebius.
871
+ """
872
+ ...
873
+
874
+ @typing.overload
875
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
876
+ """
877
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
878
+ It exists to make it easier for users to know that this decorator should only be used with
879
+ a Neo Cloud like CoreWeave.
880
+ """
881
+ ...
882
+
883
+ @typing.overload
884
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
885
+ ...
886
+
887
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
888
+ """
889
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
890
+ It exists to make it easier for users to know that this decorator should only be used with
891
+ a Neo Cloud like CoreWeave.
892
+ """
893
+ ...
894
+
895
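As a small, hedged illustration of how one of these Neo Cloud proxy decorators is applied (the decorator name is as defined in this stub; the step body is a placeholder):

```python
from metaflow import FlowSpec, step, coreweave_s3_proxy

class ProxiedS3Flow(FlowSpec):

    @coreweave_s3_proxy  # on a CoreWeave deployment, S3 traffic from this step goes via the local proxy
    @step
    def start(self):
        # S3 reads/writes issued here are expected to be routed through the proxy service.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProxiedS3Flow()
```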
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
923
896
  """
924
897
  Decorator that helps cache, version and store models/datasets from huggingface hub.
925
898
 
@@ -999,66 +972,6 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
999
972
  """
1000
973
  ...
1001
974
 
1002
- @typing.overload
1003
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
- """
1005
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1006
- It exists to make it easier for users to know that this decorator should only be used with
1007
- a Neo Cloud like CoreWeave.
1008
- """
1009
- ...
1010
-
1011
- @typing.overload
1012
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1013
- ...
1014
-
1015
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1016
- """
1017
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1018
- It exists to make it easier for users to know that this decorator should only be used with
1019
- a Neo Cloud like CoreWeave.
1020
- """
1021
- ...
1022
-
1023
- @typing.overload
1024
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1025
- """
1026
- Specifies secrets to be retrieved and injected as environment variables prior to
1027
- the execution of a step.
1028
-
1029
-
1030
- Parameters
1031
- ----------
1032
- sources : List[Union[str, Dict[str, Any]]], default: []
1033
- List of secret specs, defining how the secrets are to be retrieved
1034
- role : str, optional, default: None
1035
- Role to use for fetching secrets
1036
- """
1037
- ...
1038
-
1039
- @typing.overload
1040
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1041
- ...
1042
-
1043
- @typing.overload
1044
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1045
- ...
1046
-
1047
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1048
- """
1049
- Specifies secrets to be retrieved and injected as environment variables prior to
1050
- the execution of a step.
1051
-
1052
-
1053
- Parameters
1054
- ----------
1055
- sources : List[Union[str, Dict[str, Any]]], default: []
1056
- List of secret specs, defining how the secrets are to be retrieved
1057
- role : str, optional, default: None
1058
- Role to use for fetching secrets
1059
- """
1060
- ...
1061
-
1062
975
  @typing.overload
1063
976
  def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1064
977
  """
@@ -1189,7 +1102,7 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1189
1102
  ...
1190
1103
 
1191
1104
  @typing.overload
1192
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1105
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1193
1106
  """
1194
1107
  Decorator prototype for all step decorators. This function gets specialized
1195
1108
  and imported for all decorators types by _import_plugin_decorators().
@@ -1197,10 +1110,10 @@ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.
1197
1110
  ...
1198
1111
 
1199
1112
  @typing.overload
1200
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1113
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1201
1114
  ...
1202
1115
 
1203
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1116
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1204
1117
  """
1205
1118
  Decorator prototype for all step decorators. This function gets specialized
1206
1119
  and imported for all decorators types by _import_plugin_decorators().
@@ -1208,194 +1121,230 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
1208
1121
  ...
1209
1122
 
1210
1123
  @typing.overload
1211
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1124
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1212
1125
  """
1213
- Specifies the PyPI packages for the step.
1126
+ Specifies a timeout for your step.
1214
1127
 
1215
- Information in this decorator will augment any
1216
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1217
- you can use `@pypi_base` to set packages required by all
1218
- steps and use `@pypi` to specify step-specific overrides.
1128
+ This decorator is useful if this step may hang indefinitely.
1129
+
1130
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1131
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1132
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1133
+
1134
+ Note that all the values specified in parameters are added together, so if you specify
1135
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1219
1136
 
1220
1137
 
1221
1138
  Parameters
1222
1139
  ----------
1223
- packages : Dict[str, str], default: {}
1224
- Packages to use for this step. The key is the name of the package
1225
- and the value is the version to use.
1226
- python : str, optional, default: None
1227
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1228
- that the version used will correspond to the version of the Python interpreter used to start the run.
1140
+ seconds : int, default 0
1141
+ Number of seconds to wait prior to timing out.
1142
+ minutes : int, default 0
1143
+ Number of minutes to wait prior to timing out.
1144
+ hours : int, default 0
1145
+ Number of hours to wait prior to timing out.
1229
1146
  """
1230
1147
  ...
1231
1148
 
1232
1149
  @typing.overload
1233
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1150
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1234
1151
  ...
1235
1152
 
1236
1153
  @typing.overload
1237
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1154
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1238
1155
  ...
1239
1156
 
1240
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1157
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1241
1158
  """
1242
- Specifies the PyPI packages for the step.
1159
+ Specifies a timeout for your step.
1243
1160
 
1244
- Information in this decorator will augment any
1245
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1246
- you can use `@pypi_base` to set packages required by all
1247
- steps and use `@pypi` to specify step-specific overrides.
1161
+ This decorator is useful if this step may hang indefinitely.
1162
+
1163
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1164
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1165
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1166
+
1167
+ Note that all the values specified in parameters are added together, so if you specify
1168
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1248
1169
 
1249
1170
 
1250
1171
  Parameters
1251
1172
  ----------
1252
- packages : Dict[str, str], default: {}
1253
- Packages to use for this step. The key is the name of the package
1254
- and the value is the version to use.
1255
- python : str, optional, default: None
1256
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1257
- that the version used will correspond to the version of the Python interpreter used to start the run.
1173
+ seconds : int, default 0
1174
+ Number of seconds to wait prior to timing out.
1175
+ minutes : int, default 0
1176
+ Number of minutes to wait prior to timing out.
1177
+ hours : int, default 0
1178
+ Number of hours to wait prior to timing out.
1258
1179
  """
1259
1180
  ...
1260
1181
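As a hedged illustration of the additive timeout described above and of how it composes with `@retry` and `@catch`, consider the sketch below; the step body is a stand-in for work that might hang.

```python
import time
from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutExampleFlow(FlowSpec):

    @catch(var="timed_out")        # if every attempt times out, the exception lands here
    @retry(times=2)                # a timeout is an ordinary step failure, so it is retried
    @timeout(hours=1, minutes=1)   # effective limit: 1 hour + 1 minute, as noted above
    @step
    def start(self):
        time.sleep(5)              # stand-in for potentially hanging work
        self.result = "done"
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "timed_out", None):
            print("start exceeded its time budget on every attempt")

if __name__ == "__main__":
    TimeoutExampleFlow()
```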
 
1261
1182
  @typing.overload
1262
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1183
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1263
1184
  """
1264
- A simple decorator that demonstrates using CardDecoratorInjector
1265
- to inject a card and render simple markdown content.
1185
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1186
+
1187
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1188
+
1189
+
1190
+ Parameters
1191
+ ----------
1192
+ type : str, default 'default'
1193
+ Card type.
1194
+ id : str, optional, default None
1195
+ If multiple cards are present, use this id to identify this card.
1196
+ options : Dict[str, Any], default {}
1197
+ Options passed to the card. The contents depend on the card type.
1198
+ timeout : int, default 45
1199
+ Interrupt reporting if it takes more than this many seconds.
1266
1200
  """
1267
1201
  ...
1268
1202
 
1269
1203
  @typing.overload
1270
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1204
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1271
1205
  ...
1272
1206
 
1273
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1207
+ @typing.overload
1208
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1209
+ ...
1210
+
1211
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1274
1212
  """
1275
- A simple decorator that demonstrates using CardDecoratorInjector
1276
- to inject a card and render simple markdown content.
1213
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1214
+
1215
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1216
+
1217
+
1218
+ Parameters
1219
+ ----------
1220
+ type : str, default 'default'
1221
+ Card type.
1222
+ id : str, optional, default None
1223
+ If multiple cards are present, use this id to identify this card.
1224
+ options : Dict[str, Any], default {}
1225
+ Options passed to the card. The contents depend on the card type.
1226
+ timeout : int, default 45
1227
+ Interrupt reporting if it takes more than this many seconds.
1277
1228
  """
1278
1229
  ...
1279
1230
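A minimal sketch of the `@card` decorator documented above, assuming the `Markdown` component from `metaflow.cards` and the `current.card` accessor; the card id and content are illustrative.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardExampleFlow(FlowSpec):

    @card(type="blank", id="report", timeout=60)  # 'blank' renders only appended components
    @step
    def start(self):
        self.accuracy = 0.93  # illustrative metric
        current.card["report"].append(Markdown(f"## Report\n\nAccuracy: {self.accuracy}"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardExampleFlow()
```

After a run, something like `python card_example.py card view start` should display the rendered card.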
 
1280
1231
  @typing.overload
1281
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1232
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1282
1233
  """
1283
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1284
- It exists to make it easier for users to know that this decorator should only be used with
1285
- a Neo Cloud like Nebius.
1234
+ Decorator prototype for all step decorators. This function gets specialized
1235
+ and imported for all decorators types by _import_plugin_decorators().
1286
1236
  """
1287
1237
  ...
1288
1238
 
1289
1239
  @typing.overload
1290
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1240
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1291
1241
  ...
1292
1242
 
1293
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1243
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1294
1244
  """
1295
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1296
- It exists to make it easier for users to know that this decorator should only be used with
1297
- a Neo Cloud like Nebius.
1245
+ Decorator prototype for all step decorators. This function gets specialized
1246
+ and imported for all decorators types by _import_plugin_decorators().
1298
1247
  """
1299
1248
  ...
1300
1249
 
1301
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1250
+ @typing.overload
1251
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
1252
  """
1303
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1304
-
1305
- User code call
1306
- --------------
1307
- @vllm(
1308
- model="...",
1309
- ...
1310
- )
1311
-
1312
- Valid backend options
1313
- ---------------------
1314
- - 'local': Run as a separate process on the local task machine.
1315
-
1316
- Valid model options
1317
- -------------------
1318
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1253
+ Specifies that the step will succeed under all circumstances.
1319
1254
 
1320
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
- If you need multiple models, you must create multiple @vllm decorators.
1255
+ The decorator will create an optional artifact, specified by `var`, which
1256
+ contains the exception raised. You can use it to detect the presence
1257
+ of errors, indicating that all happy-path artifacts produced by the step
1258
+ are missing.
1322
1259
 
1323
1260
 
1324
1261
  Parameters
1325
1262
  ----------
1326
- model: str
1327
- HuggingFace model identifier to be served by vLLM.
1328
- backend: str
1329
- Determines where and how to run the vLLM process.
1330
- openai_api_server: bool
1331
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
- Default is False (uses native engine).
1333
- Set to True for backward compatibility with existing code.
1334
- debug: bool
1335
- Whether to turn on verbose debugging logs.
1336
- card_refresh_interval: int
1337
- Interval in seconds for refreshing the vLLM status card.
1338
- Only used when openai_api_server=True.
1339
- max_retries: int
1340
- Maximum number of retries checking for vLLM server startup.
1341
- Only used when openai_api_server=True.
1342
- retry_alert_frequency: int
1343
- Frequency of alert logs for vLLM server startup retries.
1344
- Only used when openai_api_server=True.
1345
- engine_args : dict
1346
- Additional keyword arguments to pass to the vLLM engine.
1347
- For example, `tensor_parallel_size=2`.
1263
+ var : str, optional, default None
1264
+ Name of the artifact in which to store the caught exception.
1265
+ If not specified, the exception is not stored.
1266
+ print_exception : bool, default True
1267
+ Determines whether or not the exception is printed to
1268
+ stdout when caught.
1348
1269
  """
1349
1270
  ...
1350
1271
 
1351
1272
  @typing.overload
1352
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1273
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1274
+ ...
1275
+
1276
+ @typing.overload
1277
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1278
+ ...
1279
+
1280
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1353
1281
  """
1354
- Specifies the Conda environment for all steps of the flow.
1282
+ Specifies that the step will succeed under all circumstances.
1355
1283
 
1356
- Use `@conda_base` to set common libraries required by all
1357
- steps and use `@conda` to specify step-specific additions.
1284
+ The decorator will create an optional artifact, specified by `var`, which
1285
+ contains the exception raised. You can use it to detect the presence
1286
+ of errors, indicating that all happy-path artifacts produced by the step
1287
+ are missing.
1358
1288
 
1359
1289
 
1360
1290
  Parameters
1361
1291
  ----------
1362
- packages : Dict[str, str], default {}
1363
- Packages to use for this flow. The key is the name of the package
1364
- and the value is the version to use.
1365
- libraries : Dict[str, str], default {}
1366
- Supported for backward compatibility. When used with packages, packages will take precedence.
1367
- python : str, optional, default None
1368
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1369
- that the version used will correspond to the version of the Python interpreter used to start the run.
1370
- disabled : bool, default False
1371
- If set to True, disables Conda.
1292
+ var : str, optional, default None
1293
+ Name of the artifact in which to store the caught exception.
1294
+ If not specified, the exception is not stored.
1295
+ print_exception : bool, default True
1296
+ Determines whether or not the exception is printed to
1297
+ stdout when caught.
1372
1298
  """
1373
1299
  ...
1374
1300
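To make the `var` behaviour concrete, here is a short sketch; the failing helper is hypothetical, and with `@catch` the flow keeps going while downstream code checks the stored exception.

```python
from metaflow import FlowSpec, step, catch, retry

def flaky_compute():
    raise RuntimeError("simulated failure")  # stand-in for work that may raise

class CatchExampleFlow(FlowSpec):

    @catch(var="compute_failed")  # the caught exception is stored as self.compute_failed
    @retry(times=2)               # retries are exhausted before @catch gives up
    @step
    def start(self):
        self.result = flaky_compute()
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print("start failed, no result available:", self.compute_failed)
        else:
            print("result:", self.result)

if __name__ == "__main__":
    CatchExampleFlow()
```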
 
1375
- @typing.overload
1376
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1377
- ...
1378
-
1379
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1301
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1380
1302
  """
1381
- Specifies the Conda environment for all steps of the flow.
1303
+ This decorator is used to run vLLM APIs as Metaflow task sidecars.
1382
1304
 
1383
- Use `@conda_base` to set common libraries required by all
1384
- steps and use `@conda` to specify step-specific additions.
1305
+ User code call
1306
+ --------------
1307
+ @vllm(
1308
+ model="...",
1309
+ ...
1310
+ )
1311
+
1312
+ Valid backend options
1313
+ ---------------------
1314
+ - 'local': Run as a separate process on the local task machine.
1315
+
1316
+ Valid model options
1317
+ -------------------
1318
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1319
+
1320
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
+ If you need multiple models, you must create multiple @vllm decorators.
1385
1322
 
1386
1323
 
1387
1324
  Parameters
1388
1325
  ----------
1389
- packages : Dict[str, str], default {}
1390
- Packages to use for this flow. The key is the name of the package
1391
- and the value is the version to use.
1392
- libraries : Dict[str, str], default {}
1393
- Supported for backward compatibility. When used with packages, packages will take precedence.
1394
- python : str, optional, default None
1395
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1396
- that the version used will correspond to the version of the Python interpreter used to start the run.
1397
- disabled : bool, default False
1398
- If set to True, disables Conda.
1326
+ model: str
1327
+ HuggingFace model identifier to be served by vLLM.
1328
+ backend: str
1329
+ Determines where and how to run the vLLM process.
1330
+ openai_api_server: bool
1331
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
+ Default is False (uses native engine).
1333
+ Set to True for backward compatibility with existing code.
1334
+ debug: bool
1335
+ Whether to turn on verbose debugging logs.
1336
+ card_refresh_interval: int
1337
+ Interval in seconds for refreshing the vLLM status card.
1338
+ Only used when openai_api_server=True.
1339
+ max_retries: int
1340
+ Maximum number of retries checking for vLLM server startup.
1341
+ Only used when openai_api_server=True.
1342
+ retry_alert_frequency: int
1343
+ Frequency of alert logs for vLLM server startup retries.
1344
+ Only used when openai_api_server=True.
1345
+ engine_args : dict
1346
+ Additional keyword arguments to pass to the vLLM engine.
1347
+ For example, `tensor_parallel_size=2`.
1399
1348
  """
1400
1349
  ...
1401
1350
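A hedged sketch of applying the `@vllm` decorator to a step; only `model` and `backend` are spelled out here, and the remaining keyword arguments in the signature above are assumed to have implementation-side defaults.

```python
from metaflow import FlowSpec, step, vllm

class VllmExampleFlow(FlowSpec):

    # One @vllm decorator serves exactly one model, per the note above.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local")
    @step
    def start(self):
        # The vLLM sidecar runs alongside this task; query it from the step body here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VllmExampleFlow()
```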
 
@@ -1492,92 +1441,209 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1492
1441
  """
1493
1442
  ...
1494
1443
 
1495
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1444
+ @typing.overload
1445
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1496
1446
  """
1497
- Specifies what flows belong to the same project.
1498
-
1499
- A project-specific namespace is created for all flows that
1500
- use the same `@project(name)`.
1447
+ Specifies the times when the flow should be run when running on a
1448
+ production scheduler.
1501
1449
 
1502
1450
 
1503
1451
  Parameters
1504
1452
  ----------
1505
- name : str
1506
- Project name. Make sure that the name is unique amongst all
1507
- projects that use the same production scheduler. The name may
1508
- contain only lowercase alphanumeric characters and underscores.
1509
-
1510
- branch : Optional[str], default None
1511
- The branch to use. If not specified, the branch is set to
1512
- `user.<username>` unless `production` is set to `True`. This can
1513
- also be set on the command line using `--branch` as a top-level option.
1514
- It is an error to specify `branch` in the decorator and on the command line.
1515
-
1516
- production : bool, default False
1517
- Whether or not the branch is the production branch. This can also be set on the
1518
- command line using `--production` as a top-level option. It is an error to specify
1519
- `production` in the decorator and on the command line.
1520
- The project branch name will be:
1521
- - if `branch` is specified:
1522
- - if `production` is True: `prod.<branch>`
1523
- - if `production` is False: `test.<branch>`
1524
- - if `branch` is not specified:
1525
- - if `production` is True: `prod`
1526
- - if `production` is False: `user.<username>`
1453
+ hourly : bool, default False
1454
+ Run the workflow hourly.
1455
+ daily : bool, default True
1456
+ Run the workflow daily.
1457
+ weekly : bool, default False
1458
+ Run the workflow weekly.
1459
+ cron : str, optional, default None
1460
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1461
+ specified by this expression.
1462
+ timezone : str, optional, default None
1463
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1464
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1527
1465
  """
1528
1466
  ...
1529
1467
 
1530
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1468
+ @typing.overload
1469
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1470
+ ...
1471
+
1472
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1531
1473
  """
1532
- Allows setting external datastores to save data for the
1533
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1534
-
1535
- This decorator is useful when users wish to save data to a different datastore
1536
- than what is configured in Metaflow. This can be for variety of reasons:
1474
+ Specifies the times when the flow should be run when running on a
1475
+ production scheduler.
1537
1476
 
1538
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1539
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1540
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1541
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1542
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1543
1477
 
1544
- Usage:
1478
+ Parameters
1545
1479
  ----------
1480
+ hourly : bool, default False
1481
+ Run the workflow hourly.
1482
+ daily : bool, default True
1483
+ Run the workflow daily.
1484
+ weekly : bool, default False
1485
+ Run the workflow weekly.
1486
+ cron : str, optional, default None
1487
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1488
+ specified by this expression.
1489
+ timezone : str, optional, default None
1490
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1491
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1492
+ """
1493
+ ...
1494
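A minimal sketch of the flow-level `@schedule` decorator just documented; the cron expression and timezone are illustrative, and the schedule only takes effect once the flow is deployed to a production scheduler such as Argo Workflows.

```python
from metaflow import FlowSpec, step, schedule

@schedule(cron="30 2 * * *", timezone="Europe/Helsinki")  # nightly at 02:30, Helsinki time
class NightlyReportFlow(FlowSpec):

    @step
    def start(self):
        self.generated_at = "placeholder"  # illustrative artifact
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyReportFlow()
```

Deploying with something like `python nightly_report.py argo-workflows create` is what registers the schedule; a plain local `run` ignores it.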
+
1495
+ @typing.overload
1496
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1497
+ """
1498
+ Specifies the flow(s) that this flow depends on.
1546
1499
 
1547
- - Using a custom IAM role to access the datastore.
1500
+ ```
1501
+ @trigger_on_finish(flow='FooFlow')
1502
+ ```
1503
+ or
1504
+ ```
1505
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1506
+ ```
1507
+ This decorator respects the @project decorator and triggers the flow
1508
+ when upstream runs within the same namespace complete successfully.
1548
1509
 
1549
- ```python
1550
- @with_artifact_store(
1551
- type="s3",
1552
- config=lambda: {
1553
- "root": "s3://my-bucket-foo/path/to/root",
1554
- "role_arn": ROLE,
1555
- },
1556
- )
1557
- class MyFlow(FlowSpec):
1510
+ Additionally, you can specify project-aware upstream flow dependencies
1511
+ by specifying the fully qualified project_flow_name.
1512
+ ```
1513
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1514
+ ```
1515
+ or
1516
+ ```
1517
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1518
+ ```
1558
1519
 
1559
- @checkpoint
1560
- @step
1561
- def start(self):
1562
- with open("my_file.txt", "w") as f:
1563
- f.write("Hello, World!")
1564
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1565
- self.next(self.end)
1520
+ You can also specify just the project or project branch (other values will be
1521
+ inferred from the current project or project branch):
1522
+ ```
1523
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1524
+ ```
1566
1525
 
1567
- ```
1526
+ Note that `branch` is typically one of:
1527
+ - `prod`
1528
+ - `user.bob`
1529
+ - `test.my_experiment`
1530
+ - `prod.staging`
1568
1531
 
1569
- - Using credentials to access the s3-compatible datastore.
1570
1532
 
1571
- ```python
1572
- @with_artifact_store(
1573
- type="s3",
1574
- config=lambda: {
1575
- "root": "s3://my-bucket-foo/path/to/root",
1576
- "client_params": {
1577
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1578
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1579
- },
1580
- },
1533
+ Parameters
1534
+ ----------
1535
+ flow : Union[str, Dict[str, str]], optional, default None
1536
+ Upstream flow dependency for this flow.
1537
+ flows : List[Union[str, Dict[str, str]]], default []
1538
+ Upstream flow dependencies for this flow.
1539
+ options : Dict[str, Any], default {}
1540
+ Backend-specific configuration for tuning eventing behavior.
1541
+ """
1542
+ ...
1543
+
1544
+ @typing.overload
1545
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1546
+ ...
1547
+
1548
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1549
+ """
1550
+ Specifies the flow(s) that this flow depends on.
1551
+
1552
+ ```
1553
+ @trigger_on_finish(flow='FooFlow')
1554
+ ```
1555
+ or
1556
+ ```
1557
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1558
+ ```
1559
+ This decorator respects the @project decorator and triggers the flow
1560
+ when upstream runs within the same namespace complete successfully
1561
+
1562
+ Additionally, you can specify project aware upstream flow dependencies
1563
+ by specifying the fully qualified project_flow_name.
1564
+ ```
1565
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1566
+ ```
1567
+ or
1568
+ ```
1569
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1570
+ ```
1571
+
1572
+ You can also specify just the project or project branch (other values will be
1573
+ inferred from the current project or project branch):
1574
+ ```
1575
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1576
+ ```
1577
+
1578
+ Note that `branch` is typically one of:
1579
+ - `prod`
1580
+ - `user.bob`
1581
+ - `test.my_experiment`
1582
+ - `prod.staging`
1583
+
1584
+
1585
+ Parameters
1586
+ ----------
1587
+ flow : Union[str, Dict[str, str]], optional, default None
1588
+ Upstream flow dependency for this flow.
1589
+ flows : List[Union[str, Dict[str, str]]], default []
1590
+ Upstream flow dependencies for this flow.
1591
+ options : Dict[str, Any], default {}
1592
+ Backend-specific configuration for tuning eventing behavior.
1593
+ """
1594
+ ...
1595
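A short sketch of the `@trigger_on_finish` decorator described above; `TrainingFlow` is a placeholder upstream flow, and the `current.trigger` accessor used to inspect the triggering run is assumed from Metaflow's events support.

```python
from metaflow import FlowSpec, step, trigger_on_finish, current

@trigger_on_finish(flow="TrainingFlow")  # start after the upstream flow completes successfully
class PublishFlow(FlowSpec):

    @step
    def start(self):
        if current.trigger:  # present only on event-triggered, deployed runs (assumption)
            print("triggered by:", [run.pathspec for run in current.trigger.runs])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PublishFlow()
```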
+
1596
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1597
+ """
1598
+ Allows setting external datastores to save data for the
1599
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1600
+
1601
+ This decorator is useful when users wish to save data to a different datastore
1602
+ than what is configured in Metaflow. This can be for a variety of reasons:
1603
+
1604
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1605
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1606
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1607
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1608
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1609
+
1610
+ Usage:
1611
+ ----------
1612
+
1613
+ - Using a custom IAM role to access the datastore.
1614
+
1615
+ ```python
1616
+ @with_artifact_store(
1617
+ type="s3",
1618
+ config=lambda: {
1619
+ "root": "s3://my-bucket-foo/path/to/root",
1620
+ "role_arn": ROLE,
1621
+ },
1622
+ )
1623
+ class MyFlow(FlowSpec):
1624
+
1625
+ @checkpoint
1626
+ @step
1627
+ def start(self):
1628
+ with open("my_file.txt", "w") as f:
1629
+ f.write("Hello, World!")
1630
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1631
+ self.next(self.end)
1632
+
1633
+ ```
1634
+
1635
+ - Using credentials to access the s3-compatible datastore.
1636
+
1637
+ ```python
1638
+ @with_artifact_store(
1639
+ type="s3",
1640
+ config=lambda: {
1641
+ "root": "s3://my-bucket-foo/path/to/root",
1642
+ "client_params": {
1643
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1644
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1645
+ },
1646
+ },
1581
1647
  )
1582
1648
  class MyFlow(FlowSpec):
1583
1649
 
@@ -1641,46 +1707,44 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1641
1707
  """
1642
1708
  ...
1643
1709
 
1644
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1710
+ @typing.overload
1711
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1645
1712
  """
1646
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1647
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1713
+ Specifies the PyPI packages for all steps of the flow.
1648
1714
 
1715
+ Use `@pypi_base` to set common packages required by all
1716
+ steps and use `@pypi` to specify step-specific overrides.
1649
1717
 
1650
1718
  Parameters
1651
1719
  ----------
1652
- timeout : int
1653
- Time, in seconds before the task times out and fails. (Default: 3600)
1654
- poke_interval : int
1655
- Time in seconds that the job should wait in between each try. (Default: 60)
1656
- mode : str
1657
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1658
- exponential_backoff : bool
1659
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1660
- pool : str
1661
- the slot pool this task should run in,
1662
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1663
- soft_fail : bool
1664
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1665
- name : str
1666
- Name of the sensor on Airflow
1667
- description : str
1668
- Description of sensor in the Airflow UI
1669
- external_dag_id : str
1670
- The dag_id that contains the task you want to wait for.
1671
- external_task_ids : List[str]
1672
- The list of task_ids that you want to wait for.
1673
- If None (default value) the sensor waits for the DAG. (Default: None)
1674
- allowed_states : List[str]
1675
- Iterable of allowed states, (Default: ['success'])
1676
- failed_states : List[str]
1677
- Iterable of failed or dis-allowed states. (Default: None)
1678
- execution_delta : datetime.timedelta
1679
- time difference with the previous execution to look at,
1680
- the default is the same logical date as the current task or DAG. (Default: None)
1681
- check_existence: bool
1682
- Set to True to check if the external task exists or check if
1683
- the DAG to wait for exists. (Default: True)
1720
+ packages : Dict[str, str], default: {}
1721
+ Packages to use for this flow. The key is the name of the package
1722
+ and the value is the version to use.
1723
+ python : str, optional, default: None
1724
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1725
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1726
+ """
1727
+ ...
1728
+
1729
+ @typing.overload
1730
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1731
+ ...
1732
+
1733
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1734
+ """
1735
+ Specifies the PyPI packages for all steps of the flow.
1736
+
1737
+ Use `@pypi_base` to set common packages required by all
1738
+ steps and use `@pypi` to specify step-specific overrides.
1739
+
1740
+ Parameters
1741
+ ----------
1742
+ packages : Dict[str, str], default: {}
1743
+ Packages to use for this flow. The key is the name of the package
1744
+ and the value is the version to use.
1745
+ python : str, optional, default: None
1746
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1747
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1684
1748
  """
1685
1749
  ...
1686
1750
 
@@ -1728,195 +1792,131 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1728
1792
  ...
1729
1793
 
1730
1794
  @typing.overload
1731
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1732
- """
1733
- Specifies the flow(s) that this flow depends on.
1734
-
1735
- ```
1736
- @trigger_on_finish(flow='FooFlow')
1737
- ```
1738
- or
1739
- ```
1740
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1741
- ```
1742
- This decorator respects the @project decorator and triggers the flow
1743
- when upstream runs within the same namespace complete successfully
1744
-
1745
- Additionally, you can specify project aware upstream flow dependencies
1746
- by specifying the fully qualified project_flow_name.
1747
- ```
1748
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1749
- ```
1750
- or
1751
- ```
1752
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1753
- ```
1754
-
1755
- You can also specify just the project or project branch (other values will be
1756
- inferred from the current project or project branch):
1757
- ```
1758
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1759
- ```
1760
-
1761
- Note that `branch` is typically one of:
1762
- - `prod`
1763
- - `user.bob`
1764
- - `test.my_experiment`
1765
- - `prod.staging`
1766
-
1767
-
1768
- Parameters
1769
- ----------
1770
- flow : Union[str, Dict[str, str]], optional, default None
1771
- Upstream flow dependency for this flow.
1772
- flows : List[Union[str, Dict[str, str]]], default []
1773
- Upstream flow dependencies for this flow.
1774
- options : Dict[str, Any], default {}
1775
- Backend-specific configuration for tuning eventing behavior.
1776
- """
1777
- ...
1778
-
1779
- @typing.overload
1780
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1781
- ...
1782
-
1783
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1795
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1784
1796
  """
1785
- Specifies the flow(s) that this flow depends on.
1786
-
1787
- ```
1788
- @trigger_on_finish(flow='FooFlow')
1789
- ```
1790
- or
1791
- ```
1792
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1793
- ```
1794
- This decorator respects the @project decorator and triggers the flow
1795
- when upstream runs within the same namespace complete successfully
1796
-
1797
- Additionally, you can specify project aware upstream flow dependencies
1798
- by specifying the fully qualified project_flow_name.
1799
- ```
1800
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1801
- ```
1802
- or
1803
- ```
1804
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1805
- ```
1806
-
1807
- You can also specify just the project or project branch (other values will be
1808
- inferred from the current project or project branch):
1809
- ```
1810
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1811
- ```
1812
-
1813
- Note that `branch` is typically one of:
1814
- - `prod`
1815
- - `user.bob`
1816
- - `test.my_experiment`
1817
- - `prod.staging`
1818
-
1797
+ Specifies the Conda environment for all steps of the flow.
1819
1798
 
1820
- Parameters
1821
- ----------
1822
- flow : Union[str, Dict[str, str]], optional, default None
1823
- Upstream flow dependency for this flow.
1824
- flows : List[Union[str, Dict[str, str]]], default []
1825
- Upstream flow dependencies for this flow.
1826
- options : Dict[str, Any], default {}
1827
- Backend-specific configuration for tuning eventing behavior.
1828
- """
1829
- ...
1830
-
1831
- @typing.overload
1832
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1833
- """
1834
- Specifies the PyPI packages for all steps of the flow.
1799
+ Use `@conda_base` to set common libraries required by all
1800
+ steps and use `@conda` to specify step-specific additions.
1835
1801
 
1836
- Use `@pypi_base` to set common packages required by all
1837
- steps and use `@pypi` to specify step-specific overrides.
1838
1802
 
1839
1803
  Parameters
1840
1804
  ----------
1841
- packages : Dict[str, str], default: {}
1805
+ packages : Dict[str, str], default {}
1842
1806
  Packages to use for this flow. The key is the name of the package
1843
1807
  and the value is the version to use.
1844
- python : str, optional, default: None
1808
+ libraries : Dict[str, str], default {}
1809
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1810
+ python : str, optional, default None
1845
1811
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1846
1812
  that the version used will correspond to the version of the Python interpreter used to start the run.
1813
+ disabled : bool, default False
1814
+ If set to True, disables Conda.
1847
1815
  """
1848
1816
  ...
1849
1817
 
1850
1818
  @typing.overload
1851
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1819
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1852
1820
  ...
1853
1821
 
1854
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1822
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1855
1823
  """
1856
- Specifies the PyPI packages for all steps of the flow.
1824
+ Specifies the Conda environment for all steps of the flow.
1825
+
1826
+ Use `@conda_base` to set common libraries required by all
1827
+ steps and use `@conda` to specify step-specific additions.
1857
1828
 
1858
- Use `@pypi_base` to set common packages required by all
1859
- steps and use `@pypi` to specify step-specific overrides.
1860
1829
 
1861
1830
  Parameters
1862
1831
  ----------
1863
- packages : Dict[str, str], default: {}
1832
+ packages : Dict[str, str], default {}
1864
1833
  Packages to use for this flow. The key is the name of the package
1865
1834
  and the value is the version to use.
1866
- python : str, optional, default: None
1835
+ libraries : Dict[str, str], default {}
1836
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1837
+ python : str, optional, default None
1867
1838
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1868
1839
  that the version used will correspond to the version of the Python interpreter used to start the run.
1840
+ disabled : bool, default False
1841
+ If set to True, disables Conda.
1869
1842
  """
1870
1843
  ...
1871
1844
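A minimal sketch of the flow-level `@conda_base` decorator described above; the package and Python pins are illustrative, and they typically only take effect when the flow is run with `--environment=conda`.

```python
from metaflow import FlowSpec, step, conda_base

@conda_base(packages={"numpy": "1.26.4"}, python="3.11.9")  # shared Conda environment for all steps
class CondaExampleFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the pinned Conda environment
        self.mean = float(np.mean([1.0, 2.0, 3.0]))
        self.next(self.end)

    @step
    def end(self):
        print("mean:", self.mean)

if __name__ == "__main__":
    CondaExampleFlow()
```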
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.


  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
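The replacement hunk documents the `@airflow_external_task_sensor` flow decorator, which gates the `start` step behind an Airflow `ExternalTaskSensor` when the flow is compiled with `airflow create`. A minimal sketch of how it might be attached follows; the DAG id, task ids, and flow name are hypothetical, and the omitted arguments are assumed to fall back to the defaults listed in the docstring above.

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step


# Hypothetical downstream flow: when compiled for Airflow, `start` waits for
# the `publish_table` task of the upstream `nightly_etl` DAG to succeed.
@airflow_external_task_sensor(
    name="wait_for_nightly_etl",          # sensor name shown in the Airflow UI (placeholder)
    description="Block until the upstream ETL DAG has published its table.",
    external_dag_id="nightly_etl",        # placeholder upstream dag_id
    external_task_ids=["publish_table"],  # placeholder upstream task_ids
)
class DownstreamReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        print("upstream data is ready; report can be generated")


if __name__ == "__main__":
    DownstreamReportFlow()
```

As the docstring notes, the sensor only takes effect when the flow is compiled for Airflow (for example `python downstream_report_flow.py airflow create downstream_report.py`, file names hypothetical), and stacking several sensor decorators makes `start` wait for all of them.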
 
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
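The final hunk replaces the remaining `@schedule` overloads with the `@project` flow decorator, which groups deployments of related flows under one namespace and derives the branch name from `branch` and `production`. The sketch below illustrates typical usage; the project and flow names are placeholders.

```python
from metaflow import FlowSpec, project, step


# Placeholder flow: every flow decorated with @project(name="demo_mlops")
# shares one project namespace, isolated per user branch by default.
@project(name="demo_mlops")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.model_version = "v1"  # placeholder artifact
        self.next(self.end)

    @step
    def end(self):
        print("scored with model", self.model_version)


if __name__ == "__main__":
    ScoringFlow()
```

Following the branch rules in the docstring, a plain run deploys under `user.<username>`, `--branch feature_x` maps to `test.feature_x`, `--branch feature_x --production` to `prod.feature_x`, and `--production` alone to `prod`.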