ob-metaflow-stubs 6.0.4.3__py2.py3-none-any.whl → 6.0.4.4rc0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (260)
  1. metaflow-stubs/__init__.pyi +780 -780
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +64 -64
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +1 -1
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +1 -1
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +1 -1
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/multicore_utils.pyi +1 -1
  116. metaflow-stubs/ob_internal.pyi +1 -1
  117. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  118. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  119. metaflow-stubs/packaging_sys/distribution_support.pyi +2 -2
  120. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  121. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  122. metaflow-stubs/packaging_sys/v1.pyi +1 -1
  123. metaflow-stubs/parameters.pyi +3 -3
  124. metaflow-stubs/plugins/__init__.pyi +9 -9
  125. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  126. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  133. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +1 -1
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  139. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  140. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  141. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  143. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  147. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  149. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  156. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  157. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  161. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  162. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  163. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  164. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  171. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  173. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  176. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  177. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  178. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  179. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  180. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  184. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  185. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  186. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  187. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  188. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  191. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  193. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  196. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  199. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  202. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  206. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  207. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  208. metaflow-stubs/plugins/perimeters.pyi +1 -1
  209. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  210. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  211. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  214. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  215. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  217. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  218. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  219. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  220. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  222. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  223. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  225. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  226. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  227. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  228. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  229. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  230. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  231. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  232. metaflow-stubs/profilers/__init__.pyi +1 -1
  233. metaflow-stubs/pylint_wrapper.pyi +1 -1
  234. metaflow-stubs/runner/__init__.pyi +1 -1
  235. metaflow-stubs/runner/deployer.pyi +4 -4
  236. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  237. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  238. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  239. metaflow-stubs/runner/nbrun.pyi +1 -1
  240. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  241. metaflow-stubs/runner/utils.pyi +2 -2
  242. metaflow-stubs/system/__init__.pyi +1 -1
  243. metaflow-stubs/system/system_logger.pyi +2 -2
  244. metaflow-stubs/system/system_monitor.pyi +1 -1
  245. metaflow-stubs/tagging_util.pyi +1 -1
  246. metaflow-stubs/tuple_util.pyi +1 -1
  247. metaflow-stubs/user_configs/__init__.pyi +1 -1
  248. metaflow-stubs/user_configs/config_options.pyi +2 -2
  249. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  250. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  251. metaflow-stubs/user_decorators/common.pyi +1 -1
  252. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  253. metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
  254. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  255. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  256. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4rc0.dist-info}/METADATA +1 -1
  257. ob_metaflow_stubs-6.0.4.4rc0.dist-info/RECORD +260 -0
  258. ob_metaflow_stubs-6.0.4.3.dist-info/RECORD +0 -260
  259. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4rc0.dist-info}/WHEEL +0 -0
  260. {ob_metaflow_stubs-6.0.4.3.dist-info → ob_metaflow_stubs-6.0.4.4rc0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.16.1.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-15T20:32:21.659179 #
+ # Generated on 2025-07-15T21:03:18.366830 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
  from . import cards as cards
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -162,267 +162,358 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Enables checkpointing for a step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables loading / saving of models within a step.

  > Examples
-
- - Saving Checkpoints
-
+ - Saving Models
  ```python
- @checkpoint
+ @model
  @step
  def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
-
- - Using Loaded Checkpoints
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```

+ - Loading models
  ```python
- @retry(times=3)
- @checkpoint
  @step
  def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
-
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
- for i in range(self.epochs):
- ...
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
  ```


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.

  temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Enables checkpointing for a step.
+ Enables loading / saving of models within a step.

  > Examples
-
- - Saving Checkpoints
-
+ - Saving Models
  ```python
- @checkpoint
+ @model
  @step
  def train(self):
- model = create_model(self.parameters, checkpoint_path = None)
- for i in range(self.epochs):
- # some training logic
- loss = model.train(self.dataset)
- if i % 10 == 0:
- model.save(
- current.checkpoint.directory,
- )
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
- # and returns a reference dictionary to the checkpoint saved in the datastore
- self.latest_checkpoint = current.checkpoint.save(
- name="epoch_checkpoint",
- metadata={
- "epoch": i,
- "loss": loss,
- }
- )
- ```
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)

- - Using Loaded Checkpoints
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```

+ - Loading models
  ```python
- @retry(times=3)
- @checkpoint
  @step
  def train(self):
- # Assume that the task has restarted and the previous attempt of the task
- # saved a checkpoint
- checkpoint_path = None
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
- print("Loaded checkpoint from the previous attempt")
- checkpoint_path = current.checkpoint.directory
-
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
- for i in range(self.epochs):
- ...
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
  ```


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.

  temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

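To make the new sidecar decorators in the hunk above easier to picture, here is a minimal, hypothetical sketch of a flow using `@ollama` as documented there. The flow name, model choice, and the assumption that the remaining keyword arguments carry defaults in the concrete decorator (the stub signature above does not show defaults) are illustrative only, not part of the package.

```python
# Hypothetical usage sketch based on the @ollama docstring in the hunk above.
# Assumes the concrete decorator supplies defaults for parameters not passed
# here; only `models` and `backend` follow the documented call shape.
from metaflow import FlowSpec, step, ollama


class OllamaSidecarFlow(FlowSpec):

    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The Ollama sidecar runs alongside this task for the duration of the
        # step; client code would talk to the served model here.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaSidecarFlow()
```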
@@ -477,6 +568,57 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
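The `@pypi` overloads added in this hunk pair with the flow-level `@pypi_base` decorator mentioned in their docstring. A rough sketch of the intended split is below; package names and versions are placeholders chosen for illustration, not taken from the package.

```python
# Illustrative only: package names and versions are placeholders.
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11")
class PypiOverrideFlow(FlowSpec):

    # Step-level @pypi augments the flow-level @pypi_base packages,
    # as the docstring above describes.
    @pypi(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        import pandas  # resolved from this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiOverrideFlow()
```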
@@ -567,182 +709,256 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
567
709
  ...
568
710
 
569
711
  @typing.overload
570
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
712
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
571
713
  """
572
- Enables loading / saving of models within a step.
573
-
574
- > Examples
575
- - Saving Models
576
- ```python
577
- @model
578
- @step
579
- def train(self):
580
- # current.model.save returns a dictionary reference to the model saved
581
- self.my_model = current.model.save(
582
- path_to_my_model,
583
- label="my_model",
584
- metadata={
585
- "epochs": 10,
586
- "batch-size": 32,
587
- "learning-rate": 0.001,
588
- }
589
- )
590
- self.next(self.test)
714
+ Decorator prototype for all step decorators. This function gets specialized
715
+ and imported for all decorators types by _import_plugin_decorators().
716
+ """
717
+ ...
718
+
719
+ @typing.overload
720
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
721
+ ...
722
+
723
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
724
+ """
725
+ Decorator prototype for all step decorators. This function gets specialized
726
+ and imported for all decorators types by _import_plugin_decorators().
727
+ """
728
+ ...
729
+
730
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
+ """
732
+ Specifies that this step should execute on DGX cloud.
591
733
 
592
- @model(load="my_model")
593
- @step
594
- def test(self):
595
- # `current.model.loaded` returns a dictionary of the loaded models
596
- # where the key is the name of the artifact and the value is the path to the model
597
- print(os.listdir(current.model.loaded["my_model"]))
598
- self.next(self.end)
599
- ```
600
734
 
601
- - Loading models
602
- ```python
603
- @step
604
- def train(self):
605
- # current.model.load returns the path to the model loaded
606
- checkpoint_path = current.model.load(
607
- self.checkpoint_key,
608
- )
609
- model_path = current.model.load(
610
- self.model,
611
- )
612
- self.next(self.test)
735
+ Parameters
736
+ ----------
737
+ gpu : int
738
+ Number of GPUs to use.
739
+ gpu_type : str
740
+ Type of Nvidia GPU to use.
741
+ queue_timeout : int
742
+ Time to keep the job in NVCF's queue.
743
+ """
744
+ ...
745
+
746
+ @typing.overload
747
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
748
+ """
749
+ Specifies environment variables to be set prior to the execution of a step.
750
+
751
+
752
+ Parameters
753
+ ----------
754
+ vars : Dict[str, str], default {}
755
+ Dictionary of environment variables to set.
756
+ """
757
+ ...
758
+
759
+ @typing.overload
760
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
761
+ ...
762
+
763
+ @typing.overload
764
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
765
+ ...
766
+
767
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
768
+ """
769
+ Specifies environment variables to be set prior to the execution of a step.
770
+
771
+
772
+ Parameters
773
+ ----------
774
+ vars : Dict[str, str], default {}
775
+ Dictionary of environment variables to set.
776
+ """
777
+ ...
778
+
779
+ @typing.overload
780
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
781
+ """
782
+ Enables checkpointing for a step.
783
+
784
+ > Examples
785
+
786
+ - Saving Checkpoints
787
+
788
+ ```python
789
+ @checkpoint
790
+ @step
791
+ def train(self):
792
+ model = create_model(self.parameters, checkpoint_path = None)
793
+ for i in range(self.epochs):
794
+ # some training logic
795
+ loss = model.train(self.dataset)
796
+ if i % 10 == 0:
797
+ model.save(
798
+ current.checkpoint.directory,
799
+ )
800
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
801
+ # and returns a reference dictionary to the checkpoint saved in the datastore
802
+ self.latest_checkpoint = current.checkpoint.save(
803
+ name="epoch_checkpoint",
804
+ metadata={
805
+ "epoch": i,
806
+ "loss": loss,
807
+ }
808
+ )
809
+ ```
810
+
811
+ - Using Loaded Checkpoints
812
+
813
+ ```python
814
+ @retry(times=3)
815
+ @checkpoint
816
+ @step
817
+ def train(self):
818
+ # Assume that the task has restarted and the previous attempt of the task
819
+ # saved a checkpoint
820
+ checkpoint_path = None
821
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
822
+ print("Loaded checkpoint from the previous attempt")
823
+ checkpoint_path = current.checkpoint.directory
824
+
825
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
826
+ for i in range(self.epochs):
827
+ ...
613
828
  ```
614
829
 
615
830
 
616
831
  Parameters
617
832
  ----------
618
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
619
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
620
- These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
621
- If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
622
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
623
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
833
+ load_policy : str, default: "fresh"
834
+ The policy for loading the checkpoint. The following policies are supported:
835
+ - "eager": Loads the the latest available checkpoint within the namespace.
836
+ With this mode, the latest checkpoint written by any previous task of the step (possibly even from a different run)
837
+ will be loaded at the start of the task.
838
+ - "none": Do not load any checkpoint
839
+ - "fresh": Loads the lastest checkpoint created within the running Task.
840
+ This mode helps load checkpoints across retry attempts of the same task.
841
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
842
+ created within the task will be loaded when the task retries execution after a failure.
624
843
 
625
844
  temp_dir_root : str, default: None
626
- The root directory under which `current.model.loaded` will store loaded models
845
+ The root directory under which `current.checkpoint.directory` will be created.
627
846
  """
628
847
  ...
629
848
 
630
849
  @typing.overload
631
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
850
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
632
851
  ...
633
852
 
634
853
  @typing.overload
635
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
854
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
636
855
  ...
637
856
 
638
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
857
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
639
858
  """
640
- Enables loading / saving of models within a step.
859
+ Enables checkpointing for a step.
641
860
 
642
861
  > Examples
643
- - Saving Models
862
+
863
+ - Saving Checkpoints
864
+
644
865
  ```python
645
- @model
866
+ @checkpoint
646
867
  @step
647
868
  def train(self):
648
- # current.model.save returns a dictionary reference to the model saved
649
- self.my_model = current.model.save(
650
- path_to_my_model,
651
- label="my_model",
652
- metadata={
653
- "epochs": 10,
654
- "batch-size": 32,
655
- "learning-rate": 0.001,
656
- }
657
- )
658
- self.next(self.test)
659
-
660
- @model(load="my_model")
661
- @step
662
- def test(self):
663
- # `current.model.loaded` returns a dictionary of the loaded models
664
- # where the key is the name of the artifact and the value is the path to the model
665
- print(os.listdir(current.model.loaded["my_model"]))
666
- self.next(self.end)
869
+ model = create_model(self.parameters, checkpoint_path = None)
870
+ for i in range(self.epochs):
871
+ # some training logic
872
+ loss = model.train(self.dataset)
873
+ if i % 10 == 0:
874
+ model.save(
875
+ current.checkpoint.directory,
876
+ )
877
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
878
+ # and returns a reference dictionary to the checkpoint saved in the datastore
879
+ self.latest_checkpoint = current.checkpoint.save(
880
+ name="epoch_checkpoint",
881
+ metadata={
882
+ "epoch": i,
883
+ "loss": loss,
884
+ }
885
+ )
667
886
  ```
668
887
 
669
- - Loading models
888
+ - Using Loaded Checkpoints
889
+
670
890
  ```python
891
+ @retry(times=3)
892
+ @checkpoint
671
893
  @step
672
894
  def train(self):
673
- # current.model.load returns the path to the model loaded
674
- checkpoint_path = current.model.load(
675
- self.checkpoint_key,
676
- )
677
- model_path = current.model.load(
678
- self.model,
679
- )
680
- self.next(self.test)
895
+ # Assume that the task has restarted and the previous attempt of the task
896
+ # saved a checkpoint
897
+ checkpoint_path = None
898
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
899
+ print("Loaded checkpoint from the previous attempt")
900
+ checkpoint_path = current.checkpoint.directory
901
+
902
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
903
+ for i in range(self.epochs):
904
+ ...
681
905
  ```
682
906
 
683
907
 
684
908
  Parameters
685
909
  ----------
686
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
687
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
688
- These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
689
- If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
690
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
691
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
910
+ load_policy : str, default: "fresh"
911
+ The policy for loading the checkpoint. The following policies are supported:
912
+ - "eager": Loads the the latest available checkpoint within the namespace.
913
+ With this mode, the latest checkpoint written by any previous task of the step (possibly even from a different run)
914
+ will be loaded at the start of the task.
915
+ - "none": Do not load any checkpoint
916
+ - "fresh": Loads the lastest checkpoint created within the running Task.
917
+ This mode helps load checkpoints across retry attempts of the same task.
918
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
919
+ created within the task will be loaded when the task retries execution after a failure.
692
920
 
693
921
  temp_dir_root : str, default: None
694
- The root directory under which `current.model.loaded` will store loaded models
922
+ The root directory under which `current.checkpoint.directory` will be created.
695
923
  """
696
924
  ...
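Beyond the retry-oriented examples in the docstring above, here is a hedged sketch of the `load_policy="eager"` mode described in the parameters, which would resume from the latest checkpoint written by any earlier run of the step. It assumes the Outerbounds checkpoint extension is installed and exposes `@checkpoint` and `current.checkpoint` as these stubs indicate; the file written into the checkpoint directory is only a stand-in for real training state:

```python
import os

from metaflow import FlowSpec, step, checkpoint, current

class ResumeTrainFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # "eager" loads the latest checkpoint written for this step within the
    # namespace, even one produced by an earlier run.
    @checkpoint(load_policy="eager")
    @step
    def train(self):
        if current.checkpoint.is_loaded:
            print("resuming from", os.listdir(current.checkpoint.directory))
        # Write some state into the checkpoint directory and persist it.
        with open(os.path.join(current.checkpoint.directory, "state.txt"), "w") as f:
            f.write("epoch=1")
        self.checkpoint_ref = current.checkpoint.save(name="latest")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResumeTrainFlow()
```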
697
925
 
698
926
  @typing.overload
699
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
927
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
700
928
  """
701
- Specifies the PyPI packages for the step.
702
-
703
- Information in this decorator will augment any
704
- attributes set in the `@pypi_base` flow-level decorator. Hence,
705
- you can use `@pypi_base` to set packages required by all
706
- steps and use `@pypi` to specify step-specific overrides.
929
+ Specifies secrets to be retrieved and injected as environment variables prior to
930
+ the execution of a step.
707
931
 
708
932
 
709
933
  Parameters
710
934
  ----------
711
- packages : Dict[str, str], default: {}
712
- Packages to use for this step. The key is the name of the package
713
- and the value is the version to use.
714
- python : str, optional, default: None
715
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
716
- that the version used will correspond to the version of the Python interpreter used to start the run.
935
+ sources : List[Union[str, Dict[str, Any]]], default: []
936
+ List of secret specs, defining how the secrets are to be retrieved
937
+ role : str, optional, default: None
938
+ Role to use for fetching secrets
717
939
  """
718
940
  ...
719
941
 
720
942
  @typing.overload
721
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
943
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
722
944
  ...
723
945
 
724
946
  @typing.overload
725
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
947
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
726
948
  ...
727
949
 
728
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
950
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
729
951
  """
730
- Specifies the PyPI packages for the step.
731
-
732
- Information in this decorator will augment any
733
- attributes set in the `@pypi_base` flow-level decorator. Hence,
734
- you can use `@pypi_base` to set packages required by all
735
- steps and use `@pypi` to specify step-specific overrides.
952
+ Specifies secrets to be retrieved and injected as environment variables prior to
953
+ the execution of a step.
736
954
 
737
955
 
738
956
  Parameters
739
957
  ----------
740
- packages : Dict[str, str], default: {}
741
- Packages to use for this step. The key is the name of the package
742
- and the value is the version to use.
743
- python : str, optional, default: None
744
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
745
- that the version used will correspond to the version of the Python interpreter used to start the run.
958
+ sources : List[Union[str, Dict[str, Any]]], default: []
959
+ List of secret specs, defining how the secrets are to be retrieved
960
+ role : str, optional, default: None
961
+ Role to use for fetching secrets
746
962
  """
747
963
  ...
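A hedged sketch of `@secrets` on a step: the secret source name and the injected environment variable are placeholders, since the actual names depend on the configured secrets backend:

```python
import os

from metaflow import FlowSpec, step, secrets

class SecretReaderFlow(FlowSpec):

    # "db-credentials" is a hypothetical secret spec; the backend injects its
    # keys as environment variables before the step body runs.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # DB_PASSWORD is a placeholder key assumed to live in that secret.
        print("password present:", "DB_PASSWORD" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretReaderFlow()
```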
748
964
 
@@ -826,98 +1042,17 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
826
1042
  """
827
1043
  ...
828
1044
 
829
- @typing.overload
830
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1045
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
831
1046
  """
832
- Internal decorator to support Fast bakery
833
- """
834
- ...
835
-
836
- @typing.overload
837
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
838
- ...
839
-
840
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
841
- """
842
- Internal decorator to support Fast bakery
843
- """
844
- ...
845
-
846
- @typing.overload
847
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
848
- """
849
- Decorator prototype for all step decorators. This function gets specialized
850
- and imported for all decorators types by _import_plugin_decorators().
851
- """
852
- ...
853
-
854
- @typing.overload
855
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
856
- ...
857
-
858
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
859
- """
860
- Decorator prototype for all step decorators. This function gets specialized
861
- and imported for all decorators types by _import_plugin_decorators().
862
- """
863
- ...
864
-
865
- @typing.overload
866
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
867
- """
868
- Specifies the Conda environment for the step.
869
-
870
- Information in this decorator will augment any
871
- attributes set in the `@conda_base` flow-level decorator. Hence,
872
- you can use `@conda_base` to set packages required by all
873
- steps and use `@conda` to specify step-specific overrides.
874
-
875
-
876
- Parameters
877
- ----------
878
- packages : Dict[str, str], default {}
879
- Packages to use for this step. The key is the name of the package
880
- and the value is the version to use.
881
- libraries : Dict[str, str], default {}
882
- Supported for backward compatibility. When used with packages, packages will take precedence.
883
- python : str, optional, default None
884
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
885
- that the version used will correspond to the version of the Python interpreter used to start the run.
886
- disabled : bool, default False
887
- If set to True, disables @conda.
888
- """
889
- ...
890
-
891
- @typing.overload
892
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
893
- ...
894
-
895
- @typing.overload
896
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
897
- ...
898
-
899
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
900
- """
901
- Specifies the Conda environment for the step.
902
-
903
- Information in this decorator will augment any
904
- attributes set in the `@conda_base` flow-level decorator. Hence,
905
- you can use `@conda_base` to set packages required by all
906
- steps and use `@conda` to specify step-specific overrides.
1047
+ Specifies that this step should execute on DGX Cloud.
907
1048
 
908
1049
 
909
1050
  Parameters
910
1051
  ----------
911
- packages : Dict[str, str], default {}
912
- Packages to use for this step. The key is the name of the package
913
- and the value is the version to use.
914
- libraries : Dict[str, str], default {}
915
- Supported for backward compatibility. When used with packages, packages will take precedence.
916
- python : str, optional, default None
917
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
918
- that the version used will correspond to the version of the Python interpreter used to start the run.
919
- disabled : bool, default False
920
- If set to True, disables @conda.
1052
+ gpu : int
1053
+ Number of GPUs to use.
1054
+ gpu_type : str
1055
+ Type of Nvidia GPU to use.
921
1056
  """
922
1057
  ...
923
1058
 
@@ -980,161 +1115,69 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
980
1115
  """
981
1116
  ...
982
1117
 
983
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
984
- """
985
- Specifies that this step should execute on DGX cloud.
986
-
987
-
988
- Parameters
989
- ----------
990
- gpu : int
991
- Number of GPUs to use.
992
- gpu_type : str
993
- Type of Nvidia GPU to use.
994
- """
995
- ...
996
-
997
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
998
- """
999
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1000
-
1001
- User code call
1002
- --------------
1003
- @vllm(
1004
- model="...",
1005
- ...
1006
- )
1007
-
1008
- Valid backend options
1009
- ---------------------
1010
- - 'local': Run as a separate process on the local task machine.
1011
-
1012
- Valid model options
1013
- -------------------
1014
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1015
-
1016
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1017
- If you need multiple models, you must create multiple @vllm decorators.
1018
-
1019
-
1020
- Parameters
1021
- ----------
1022
- model: str
1023
- HuggingFace model identifier to be served by vLLM.
1024
- backend: str
1025
- Determines where and how to run the vLLM process.
1026
- openai_api_server: bool
1027
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1028
- Default is False (uses native engine).
1029
- Set to True for backward compatibility with existing code.
1030
- debug: bool
1031
- Whether to turn on verbose debugging logs.
1032
- card_refresh_interval: int
1033
- Interval in seconds for refreshing the vLLM status card.
1034
- Only used when openai_api_server=True.
1035
- max_retries: int
1036
- Maximum number of retries checking for vLLM server startup.
1037
- Only used when openai_api_server=True.
1038
- retry_alert_frequency: int
1039
- Frequency of alert logs for vLLM server startup retries.
1040
- Only used when openai_api_server=True.
1041
- engine_args : dict
1042
- Additional keyword arguments to pass to the vLLM engine.
1043
- For example, `tensor_parallel_size=2`.
1044
- """
1045
- ...
1046
-
1047
1118
  @typing.overload
1048
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1119
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1049
1120
  """
1050
- Specifies the number of times the task corresponding
1051
- to a step needs to be retried.
1052
-
1053
- This decorator is useful for handling transient errors, such as networking issues.
1054
- If your task contains operations that can't be retried safely, e.g. database updates,
1055
- it is advisable to annotate it with `@retry(times=0)`.
1121
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1056
1122
 
1057
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1058
- decorator will execute a no-op task after all retries have been exhausted,
1059
- ensuring that the flow execution can continue.
1123
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1060
1124
 
1061
1125
 
1062
1126
  Parameters
1063
1127
  ----------
1064
- times : int, default 3
1065
- Number of times to retry this task.
1066
- minutes_between_retries : int, default 2
1067
- Number of minutes between retries.
1128
+ type : str, default 'default'
1129
+ Card type.
1130
+ id : str, optional, default None
1131
+ If multiple cards are present, use this id to identify this card.
1132
+ options : Dict[str, Any], default {}
1133
+ Options passed to the card. The contents depend on the card type.
1134
+ timeout : int, default 45
1135
+ Interrupt reporting if it takes more than this many seconds.
1068
1136
  """
1069
1137
  ...
1070
1138
 
1071
1139
  @typing.overload
1072
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1140
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1073
1141
  ...
1074
1142
 
1075
1143
  @typing.overload
1076
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1144
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1077
1145
  ...
1078
1146
 
1079
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1147
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1080
1148
  """
1081
- Specifies the number of times the task corresponding
1082
- to a step needs to be retried.
1083
-
1084
- This decorator is useful for handling transient errors, such as networking issues.
1085
- If your task contains operations that can't be retried safely, e.g. database updates,
1086
- it is advisable to annotate it with `@retry(times=0)`.
1149
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1087
1150
 
1088
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1089
- decorator will execute a no-op task after all retries have been exhausted,
1090
- ensuring that the flow execution can continue.
1151
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1091
1152
 
1092
1153
 
1093
1154
  Parameters
1094
1155
  ----------
1095
- times : int, default 3
1096
- Number of times to retry this task.
1097
- minutes_between_retries : int, default 2
1098
- Number of minutes between retries.
1156
+ type : str, default 'default'
1157
+ Card type.
1158
+ id : str, optional, default None
1159
+ If multiple cards are present, use this id to identify this card.
1160
+ options : Dict[str, Any], default {}
1161
+ Options passed to the card. The contents depend on the card type.
1162
+ timeout : int, default 45
1163
+ Interrupt reporting if it takes more than this many seconds.
1099
1164
  """
1100
1165
  ...
1101
1166
 
1102
1167
  @typing.overload
1103
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1168
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1104
1169
  """
1105
- Specifies secrets to be retrieved and injected as environment variables prior to
1106
- the execution of a step.
1107
-
1108
-
1109
- Parameters
1110
- ----------
1111
- sources : List[Union[str, Dict[str, Any]]], default: []
1112
- List of secret specs, defining how the secrets are to be retrieved
1113
- role : str, optional, default: None
1114
- Role to use for fetching secrets
1170
+ Internal decorator to support Fast bakery
1115
1171
  """
1116
1172
  ...
1117
1173
 
1118
1174
  @typing.overload
1119
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1120
- ...
1121
-
1122
- @typing.overload
1123
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1175
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1124
1176
  ...
1125
1177
 
1126
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1178
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1127
1179
  """
1128
- Specifies secrets to be retrieved and injected as environment variables prior to
1129
- the execution of a step.
1130
-
1131
-
1132
- Parameters
1133
- ----------
1134
- sources : List[Union[str, Dict[str, Any]]], default: []
1135
- List of secret specs, defining how the secrets are to be retrieved
1136
- role : str, optional, default: None
1137
- Role to use for fetching secrets
1180
+ Internal decorator to support Fast bakery
1138
1181
  """
1139
1182
  ...
1140
1183
 
@@ -1217,97 +1260,198 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
1217
1260
  """
1218
1261
  ...
1219
1262
 
1220
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1263
+ @typing.overload
1264
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1221
1265
  """
1222
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1223
-
1224
- User code call
1225
- --------------
1226
- @ollama(
1227
- models=[...],
1228
- ...
1229
- )
1266
+ Specifies the flow(s) that this flow depends on.
1230
1267
 
1231
- Valid backend options
1232
- ---------------------
1233
- - 'local': Run as a separate process on the local task machine.
1234
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1235
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1268
+ ```
1269
+ @trigger_on_finish(flow='FooFlow')
1270
+ ```
1271
+ or
1272
+ ```
1273
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1274
+ ```
1275
+ This decorator respects the @project decorator and triggers the flow
1276
+ when upstream runs within the same namespace complete successfully.
1236
1277
 
1237
- Valid model options
1238
- -------------------
1239
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1278
+ Additionally, you can specify project-aware upstream flow dependencies
1279
+ by specifying the fully qualified project_flow_name.
1280
+ ```
1281
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1282
+ ```
1283
+ or
1284
+ ```
1285
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1286
+ ```
1287
+
1288
+ You can also specify just the project or project branch (other values will be
1289
+ inferred from the current project or project branch):
1290
+ ```
1291
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1292
+ ```
1293
+
1294
+ Note that `branch` is typically one of:
1295
+ - `prod`
1296
+ - `user.bob`
1297
+ - `test.my_experiment`
1298
+ - `prod.staging`
1240
1299
 
1241
1300
 
1242
1301
  Parameters
1243
1302
  ----------
1244
- models: list[str]
1245
- List of Ollama containers running models in sidecars.
1246
- backend: str
1247
- Determines where and how to run the Ollama process.
1248
- force_pull: bool
1249
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1250
- cache_update_policy: str
1251
- Cache update policy: "auto", "force", or "never".
1252
- force_cache_update: bool
1253
- Simple override for "force" cache update policy.
1254
- debug: bool
1255
- Whether to turn on verbose debugging logs.
1256
- circuit_breaker_config: dict
1257
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1258
- timeout_config: dict
1259
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1303
+ flow : Union[str, Dict[str, str]], optional, default None
1304
+ Upstream flow dependency for this flow.
1305
+ flows : List[Union[str, Dict[str, str]]], default []
1306
+ Upstream flow dependencies for this flow.
1307
+ options : Dict[str, Any], default {}
1308
+ Backend-specific configuration for tuning eventing behavior.
1260
1309
  """
1261
1310
  ...
1262
1311
 
1263
1312
  @typing.overload
1264
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1313
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1314
+ ...
1315
+
1316
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1265
1317
  """
1266
- Specifies the Conda environment for all steps of the flow.
1318
+ Specifies the flow(s) that this flow depends on.
1267
1319
 
1268
- Use `@conda_base` to set common libraries required by all
1269
- steps and use `@conda` to specify step-specific additions.
1320
+ ```
1321
+ @trigger_on_finish(flow='FooFlow')
1322
+ ```
1323
+ or
1324
+ ```
1325
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1326
+ ```
1327
+ This decorator respects the @project decorator and triggers the flow
1328
+ when upstream runs within the same namespace complete successfully.
1329
+
1330
+ Additionally, you can specify project-aware upstream flow dependencies
1331
+ by specifying the fully qualified project_flow_name.
1332
+ ```
1333
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1334
+ ```
1335
+ or
1336
+ ```
1337
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1338
+ ```
1339
+
1340
+ You can also specify just the project or project branch (other values will be
1341
+ inferred from the current project or project branch):
1342
+ ```
1343
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1344
+ ```
1345
+
1346
+ Note that `branch` is typically one of:
1347
+ - `prod`
1348
+ - `user.bob`
1349
+ - `test.my_experiment`
1350
+ - `prod.staging`
1270
1351
 
1271
1352
 
1272
1353
  Parameters
1273
1354
  ----------
1274
- packages : Dict[str, str], default {}
1275
- Packages to use for this flow. The key is the name of the package
1276
- and the value is the version to use.
1277
- libraries : Dict[str, str], default {}
1278
- Supported for backward compatibility. When used with packages, packages will take precedence.
1279
- python : str, optional, default None
1280
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1281
- that the version used will correspond to the version of the Python interpreter used to start the run.
1282
- disabled : bool, default False
1283
- If set to True, disables Conda.
1355
+ flow : Union[str, Dict[str, str]], optional, default None
1356
+ Upstream flow dependency for this flow.
1357
+ flows : List[Union[str, Dict[str, str]]], default []
1358
+ Upstream flow dependencies for this flow.
1359
+ options : Dict[str, Any], default {}
1360
+ Backend-specific configuration for tuning eventing behavior.
1284
1361
  """
1285
1362
  ...
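A hedged sketch of a downstream flow using `@trigger_on_finish`; `TrainingFlow` is a placeholder upstream flow name:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Once deployed (e.g. via `argo-workflows create`), this flow starts whenever
# a run of the hypothetical upstream TrainingFlow completes successfully.
@trigger_on_finish(flow="TrainingFlow")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        print("triggered by upstream flow")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```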
1286
1363
 
1287
1364
  @typing.overload
1288
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1365
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1366
+ """
1367
+ Specifies the times when the flow should be run when running on a
1368
+ production scheduler.
1369
+
1370
+
1371
+ Parameters
1372
+ ----------
1373
+ hourly : bool, default False
1374
+ Run the workflow hourly.
1375
+ daily : bool, default True
1376
+ Run the workflow daily.
1377
+ weekly : bool, default False
1378
+ Run the workflow weekly.
1379
+ cron : str, optional, default None
1380
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1381
+ specified by this expression.
1382
+ timezone : str, optional, default None
1383
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1384
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1385
+ """
1289
1386
  ...
1290
1387
 
1291
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1388
+ @typing.overload
1389
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1390
+ ...
1391
+
1392
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1292
1393
  """
1293
- Specifies the Conda environment for all steps of the flow.
1394
+ Specifies the times when the flow should be run when running on a
1395
+ production scheduler.
1294
1396
 
1295
- Use `@conda_base` to set common libraries required by all
1296
- steps and use `@conda` to specify step-specific additions.
1397
+
1398
+ Parameters
1399
+ ----------
1400
+ hourly : bool, default False
1401
+ Run the workflow hourly.
1402
+ daily : bool, default True
1403
+ Run the workflow daily.
1404
+ weekly : bool, default False
1405
+ Run the workflow weekly.
1406
+ cron : str, optional, default None
1407
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1408
+ specified by this expression.
1409
+ timezone : str, optional, default None
1410
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1411
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1412
+ """
1413
+ ...
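A short sketch of `@schedule` using the boolean flags from the signature above; per that signature `daily` defaults to True, so it is disabled explicitly when switching to hourly runs:

```python
from metaflow import FlowSpec, step, schedule

# Runs every hour once deployed to a production scheduler.
@schedule(hourly=True, daily=False)
class HourlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HourlyFlow()
```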
1414
+
1415
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1416
+ """
1417
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1418
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1419
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1420
+ added as a flow decorator. Adding more than one decorator ensures that the `start` step
1421
+ starts only after all sensors finish.
1297
1422
 
1298
1423
 
1299
1424
  Parameters
1300
1425
  ----------
1301
- packages : Dict[str, str], default {}
1302
- Packages to use for this flow. The key is the name of the package
1303
- and the value is the version to use.
1304
- libraries : Dict[str, str], default {}
1305
- Supported for backward compatibility. When used with packages, packages will take precedence.
1306
- python : str, optional, default None
1307
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1308
- that the version used will correspond to the version of the Python interpreter used to start the run.
1309
- disabled : bool, default False
1310
- If set to True, disables Conda.
1426
+ timeout : int
1427
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1428
+ poke_interval : int
1429
+ Time in seconds that the job should wait in between each try. (Default: 60)
1430
+ mode : str
1431
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1432
+ exponential_backoff : bool
1433
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1434
+ pool : str
1435
+ The slot pool this task should run in;
1436
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1437
+ soft_fail : bool
1438
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1439
+ name : str
1440
+ Name of the sensor on Airflow
1441
+ description : str
1442
+ Description of sensor in the Airflow UI
1443
+ bucket_key : Union[str, List[str]]
1444
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1445
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1446
+ bucket_name : str
1447
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1448
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1449
+ wildcard_match : bool
1450
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1451
+ aws_conn_id : str
1452
+ A reference to the S3 connection on Airflow. (Default: None)
1453
+ verify : bool
1454
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1311
1455
  """
1312
1456
  ...
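For orientation, a hedged sketch of the S3 key sensor on a flow compiled with `airflow create`; the bucket key is a placeholder, and the omitted parameters are assumed to fall back to the defaults listed in the docstring above:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# When scheduled on Airflow, the start step waits for a hypothetical daily
# input file before executing.
@airflow_s3_key_sensor(
    name="wait_for_input",
    bucket_key="s3://example-bucket/input/daily.csv",  # placeholder key
    timeout=3600,
    poke_interval=60,
)
class S3TriggeredFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3TriggeredFlow()
```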
1313
1457
 
@@ -1403,76 +1547,25 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1403
1547
  },
1404
1548
  }):
1405
1549
  load_model(
1406
- task.data.model_ref,
1407
- "test-models"
1408
- )
1409
- ```
1410
- Parameters:
1411
- ----------
1412
-
1413
- type: str
1414
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1415
-
1416
- config: dict or Callable
1417
- Dictionary of configuration options for the datastore. The following keys are required:
1418
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1419
- - example: 's3://bucket-name/path/to/root'
1420
- - example: 'gs://bucket-name/path/to/root'
1421
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1422
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1423
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1424
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1425
- """
1426
- ...
1427
-
1428
- @typing.overload
1429
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1430
- """
1431
- Specifies the times when the flow should be run when running on a
1432
- production scheduler.
1433
-
1434
-
1435
- Parameters
1436
- ----------
1437
- hourly : bool, default False
1438
- Run the workflow hourly.
1439
- daily : bool, default True
1440
- Run the workflow daily.
1441
- weekly : bool, default False
1442
- Run the workflow weekly.
1443
- cron : str, optional, default None
1444
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1445
- specified by this expression.
1446
- timezone : str, optional, default None
1447
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1448
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1449
- """
1450
- ...
1451
-
1452
- @typing.overload
1453
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1454
- ...
1455
-
1456
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1457
- """
1458
- Specifies the times when the flow should be run when running on a
1459
- production scheduler.
1460
-
1461
-
1462
- Parameters
1463
- ----------
1464
- hourly : bool, default False
1465
- Run the workflow hourly.
1466
- daily : bool, default True
1467
- Run the workflow daily.
1468
- weekly : bool, default False
1469
- Run the workflow weekly.
1470
- cron : str, optional, default None
1471
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1472
- specified by this expression.
1473
- timezone : str, optional, default None
1474
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1475
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1550
+ task.data.model_ref,
1551
+ "test-models"
1552
+ )
1553
+ ```
1554
+ Parameters:
1555
+ ----------
1556
+
1557
+ type: str
1558
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1559
+
1560
+ config: dict or Callable
1561
+ Dictionary of configuration options for the datastore. The following keys are required:
1562
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1563
+ - example: 's3://bucket-name/path/to/root'
1564
+ - example: 'gs://bucket-name/path/to/root'
1565
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1566
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1567
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1568
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1476
1569
  """
1477
1570
  ...
1478
1571
 
@@ -1552,6 +1645,49 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1552
1645
  """
1553
1646
  ...
1554
1647
 
1648
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1649
+ """
1650
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1651
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
1652
+
1653
+
1654
+ Parameters
1655
+ ----------
1656
+ timeout : int
1657
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1658
+ poke_interval : int
1659
+ Time in seconds that the job should wait in between each try. (Default: 60)
1660
+ mode : str
1661
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1662
+ exponential_backoff : bool
1663
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1664
+ pool : str
1665
+ The slot pool this task should run in;
1666
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1667
+ soft_fail : bool
1668
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1669
+ name : str
1670
+ Name of the sensor on Airflow
1671
+ description : str
1672
+ Description of sensor in the Airflow UI
1673
+ external_dag_id : str
1674
+ The dag_id that contains the task you want to wait for.
1675
+ external_task_ids : List[str]
1676
+ The list of task_ids that you want to wait for.
1677
+ If None (default value) the sensor waits for the DAG. (Default: None)
1678
+ allowed_states : List[str]
1679
+ Iterable of allowed states. (Default: ['success'])
1680
+ failed_states : List[str]
1681
+ Iterable of failed or disallowed states. (Default: None)
1682
+ execution_delta : datetime.timedelta
1683
+ Time difference with the previous execution to look at;
1684
+ the default is the same logical date as the current task or DAG. (Default: None)
1685
+ check_existence: bool
1686
+ Set to True to check whether the external task exists or whether
1687
+ the DAG to wait for exists. (Default: True)
1688
+ """
1689
+ ...
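Similarly, a hedged sketch of the external task sensor; the upstream DAG id is a placeholder, and omitted arguments are assumed to take the documented defaults:

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# When compiled with `airflow create`, the start step waits for a run of the
# hypothetical "daily_etl" DAG to succeed before executing.
@airflow_external_task_sensor(
    name="wait_for_daily_etl",
    external_dag_id="daily_etl",  # placeholder upstream DAG id
    timeout=3600,
    poke_interval=60,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```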
1690
+
1555
1691
  @typing.overload
1556
1692
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1557
1693
  """
@@ -1646,189 +1782,53 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1646
1782
  ...
1647
1783
 
1648
1784
  @typing.overload
1649
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1785
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1650
1786
  """
1651
- Specifies the flow(s) that this flow depends on.
1652
-
1653
- ```
1654
- @trigger_on_finish(flow='FooFlow')
1655
- ```
1656
- or
1657
- ```
1658
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1659
- ```
1660
- This decorator respects the @project decorator and triggers the flow
1661
- when upstream runs within the same namespace complete successfully
1662
-
1663
- Additionally, you can specify project aware upstream flow dependencies
1664
- by specifying the fully qualified project_flow_name.
1665
- ```
1666
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1667
- ```
1668
- or
1669
- ```
1670
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1671
- ```
1672
-
1673
- You can also specify just the project or project branch (other values will be
1674
- inferred from the current project or project branch):
1675
- ```
1676
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1677
- ```
1787
+ Specifies the Conda environment for all steps of the flow.
1678
1788
 
1679
- Note that `branch` is typically one of:
1680
- - `prod`
1681
- - `user.bob`
1682
- - `test.my_experiment`
1683
- - `prod.staging`
1789
+ Use `@conda_base` to set common libraries required by all
1790
+ steps and use `@conda` to specify step-specific additions.
1684
1791
 
1685
1792
 
1686
1793
  Parameters
1687
1794
  ----------
1688
- flow : Union[str, Dict[str, str]], optional, default None
1689
- Upstream flow dependency for this flow.
1690
- flows : List[Union[str, Dict[str, str]]], default []
1691
- Upstream flow dependencies for this flow.
1692
- options : Dict[str, Any], default {}
1693
- Backend-specific configuration for tuning eventing behavior.
1795
+ packages : Dict[str, str], default {}
1796
+ Packages to use for this flow. The key is the name of the package
1797
+ and the value is the version to use.
1798
+ libraries : Dict[str, str], default {}
1799
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1800
+ python : str, optional, default None
1801
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1802
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1803
+ disabled : bool, default False
1804
+ If set to True, disables Conda.
1694
1805
  """
1695
1806
  ...
1696
1807
 
1697
1808
  @typing.overload
1698
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1699
- ...
1700
-
1701
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1702
- """
1703
- Specifies the flow(s) that this flow depends on.
1704
-
1705
- ```
1706
- @trigger_on_finish(flow='FooFlow')
1707
- ```
1708
- or
1709
- ```
1710
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1711
- ```
1712
- This decorator respects the @project decorator and triggers the flow
1713
- when upstream runs within the same namespace complete successfully
1714
-
1715
- Additionally, you can specify project aware upstream flow dependencies
1716
- by specifying the fully qualified project_flow_name.
1717
- ```
1718
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1719
- ```
1720
- or
1721
- ```
1722
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1723
- ```
1724
-
1725
- You can also specify just the project or project branch (other values will be
1726
- inferred from the current project or project branch):
1727
- ```
1728
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1729
- ```
1730
-
1731
- Note that `branch` is typically one of:
1732
- - `prod`
1733
- - `user.bob`
1734
- - `test.my_experiment`
1735
- - `prod.staging`
1736
-
1737
-
1738
- Parameters
1739
- ----------
1740
- flow : Union[str, Dict[str, str]], optional, default None
1741
- Upstream flow dependency for this flow.
1742
- flows : List[Union[str, Dict[str, str]]], default []
1743
- Upstream flow dependencies for this flow.
1744
- options : Dict[str, Any], default {}
1745
- Backend-specific configuration for tuning eventing behavior.
1746
- """
1809
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1747
1810
  ...
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
+    Specifies the Conda environment for all steps of the flow.

-    Parameters
-    ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states, (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or dis-allowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        time difference with the previous execution to look at,
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence: bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
-    """
-    ...
-
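As a rough usage sketch of the sensor decorator documented above: it is applied at the flow level and only takes effect once the flow is compiled with `airflow create`. The DAG id, task id, and parameter values below are illustrative assumptions, and the remaining parameters fall back to the defaults listed in the docstring.

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Illustrative sketch: gate the `start` step on an assumed upstream Airflow DAG
# ("daily_ingest") and task ("publish"); these names are not from this diff.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    external_dag_id="daily_ingest",
    external_task_ids=["publish"],
    allowed_states=["success"],
)
class WaitOnUpstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    WaitOnUpstreamFlow()
```

The `@airflow_s3_key_sensor` decorator removed below follows the same pattern, with S3 key and bucket parameters in place of the external DAG and task identifiers.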
-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as a flow decorators. Adding more than one decorator will ensure that `start` step
-    starts only after all sensors finish.
+    Use `@conda_base` to set common libraries required by all
+    steps and use `@conda` to specify step-specific additions.


     Parameters
     ----------
-    timeout : int
-        Time, in seconds before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait in between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
-    pool : str
-        the slot pool this task should run in,
-        slot pools are a way to limit concurrency for certain tasks. (Default:None)
-    soft_fail : bool
-        Set to true to mark the task as SKIPPED on failure. (Default: False)
-    name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of sensor in the Airflow UI
-    bucket_key : Union[str, List[str]]
-        The key(s) being waited on. Supports full s3:// style url or relative path from root level.
-        When it's specified as a full s3:// url, please leave `bucket_name` as None
-    bucket_name : str
-        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
-        When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
-    wildcard_match : bool
-        whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
-    aws_conn_id : str
-        a reference to the s3 connection on Airflow. (Default: None)
-    verify : bool
-        Whether or not to verify SSL certificates for S3 connection. (Default: None)
+    packages : Dict[str, str], default {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables Conda.
     """
     ...
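To make the relationship between the flow-level and step-level Conda decorators described in the new `@conda_base` docstring concrete, here is a hedged sketch; the package names and versions are illustrative assumptions.

```python
from metaflow import FlowSpec, conda, conda_base, step

# Illustrative sketch: @conda_base pins the flow-wide environment, while @conda
# adds a step-specific package on top of it. Versions below are assumptions.
@conda_base(python="3.10.12", packages={"pandas": "2.1.4"})
class CondaEnvFlow(FlowSpec):

    @step
    def start(self):
        import pandas  # resolved from the flow-level @conda_base environment
        self.next(self.train)

    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def train(self):
        import sklearn  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaEnvFlow()
```

Running the flow with Metaflow's Conda environment enabled (for example `python conda_env_flow.py --environment=conda run`, file name assumed) resolves both environments before the steps execute.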