ob-metaflow-stubs 6.0.4.6rc0__py2.py3-none-any.whl → 6.0.4.6rc1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (249)
  1. metaflow-stubs/__init__.pyi +879 -879
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +44 -44
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +2 -2
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/multicore_utils.pyi +1 -1
  116. metaflow-stubs/ob_internal.pyi +1 -1
  117. metaflow-stubs/parameters.pyi +3 -3
  118. metaflow-stubs/plugins/__init__.pyi +12 -12
  119. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  121. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  122. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  123. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  124. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  125. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  126. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  128. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  129. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  130. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  131. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  132. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  133. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  134. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  135. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  136. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  137. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  138. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  139. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  140. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  141. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  143. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  145. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  146. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  147. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  148. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  149. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  150. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  152. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  153. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  154. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  155. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  156. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  157. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  159. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  160. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  161. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  162. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  163. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  164. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  165. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  166. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  170. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  171. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  172. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  173. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  174. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  175. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  176. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  177. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  178. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  179. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  180. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  181. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  182. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  184. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  187. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  189. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  190. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  191. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  192. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  193. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  194. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  195. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  196. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  197. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  198. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  199. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  200. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  202. metaflow-stubs/plugins/perimeters.pyi +1 -1
  203. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  204. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  205. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  206. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  207. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  208. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  209. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  210. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  211. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  212. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  214. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  215. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  217. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  218. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  219. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  220. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  221. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  222. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  223. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  224. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  225. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  226. metaflow-stubs/profilers/__init__.pyi +1 -1
  227. metaflow-stubs/pylint_wrapper.pyi +1 -1
  228. metaflow-stubs/runner/__init__.pyi +1 -1
  229. metaflow-stubs/runner/deployer.pyi +5 -5
  230. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  231. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  232. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  233. metaflow-stubs/runner/nbrun.pyi +1 -1
  234. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  235. metaflow-stubs/runner/utils.pyi +2 -2
  236. metaflow-stubs/system/__init__.pyi +1 -1
  237. metaflow-stubs/system/system_logger.pyi +1 -1
  238. metaflow-stubs/system/system_monitor.pyi +1 -1
  239. metaflow-stubs/tagging_util.pyi +1 -1
  240. metaflow-stubs/tuple_util.pyi +1 -1
  241. metaflow-stubs/user_configs/__init__.pyi +1 -1
  242. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  243. metaflow-stubs/user_configs/config_options.pyi +3 -3
  244. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  245. {ob_metaflow_stubs-6.0.4.6rc0.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/METADATA +1 -1
  246. ob_metaflow_stubs-6.0.4.6rc1.dist-info/RECORD +249 -0
  247. ob_metaflow_stubs-6.0.4.6rc0.dist-info/RECORD +0 -249
  248. {ob_metaflow_stubs-6.0.4.6rc0.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/WHEEL +0 -0
  249. {ob_metaflow_stubs-6.0.4.6rc0.dist-info → ob_metaflow_stubs-6.0.4.6rc1.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.21.2+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-16T22:08:50.283182 #
+ # Generated on 2025-07-16T22:24:21.902352 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
- from . import metaflow_git as metaflow_git
  from . import cards as cards
  from . import tuple_util as tuple_util
+ from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
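
The re-exported names reordered above are what user code imports from the top-level `metaflow` package described by this stub. A minimal sketch for orientation (illustrative only, not part of the stub diff; the flow and step names are hypothetical):

```python
# Illustrative only: exercises a couple of the re-exported names (FlowSpec, step).
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):

    @step
    def start(self):
        print("hello")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HelloFlow()
```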
@@ -157,80 +157,26 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
  Decorator prototype for all step decorators. This function gets specialized
  and imported for all decorators types by _import_plugin_decorators().
@@ -238,10 +184,10 @@ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.
  ...

  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
  Decorator prototype for all step decorators. This function gets specialized
  and imported for all decorators types by _import_plugin_decorators().
@@ -249,92 +195,132 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the PyPI packages for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
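
The hunk above moves the stub docstrings for `@catch` and `@resources` into this position. A minimal usage sketch based only on the parameters documented there (illustrative only, not part of the stub diff; the flow and artifact names are hypothetical):

```python
# Illustrative only: combines @catch and @resources as documented above.
from metaflow import FlowSpec, step, catch, resources

class ResilientFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        self.result = sum(range(10))  # happy-path artifact
        self.next(self.end)

    @step
    def end(self):
        # per the @catch docstring, the artifact only exists if an exception was caught
        if getattr(self, "compute_failed", None):
            print("start step failed:", self.compute_failed)

if __name__ == "__main__":
    ResilientFlow()
```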

@@ -382,306 +368,61 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
-
- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
- """
- ...
-
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
-
-
- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
-
- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
- """
- ...
-
- @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
- """
- Enables loading / saving of models within a step.
-
- > Examples
- - Saving Models
- ```python
- @model
- @step
- def train(self):
- # current.model.save returns a dictionary reference to the model saved
- self.my_model = current.model.save(
- path_to_my_model,
- label="my_model",
- metadata={
- "epochs": 10,
- "batch-size": 32,
- "learning-rate": 0.001,
- }
- )
- self.next(self.test)
-
- @model(load="my_model")
- @step
- def test(self):
- # `current.model.loaded` returns a dictionary of the loaded models
- # where the key is the name of the artifact and the value is the path to the model
- print(os.listdir(current.model.loaded["my_model"]))
- self.next(self.end)
- ```
-
- - Loading models
- ```python
- @step
- def train(self):
- # current.model.load returns the path to the model loaded
- checkpoint_path = current.model.load(
- self.checkpoint_key,
- )
- model_path = current.model.load(
- self.model,
- )
- self.next(self.test)
- ```
-
-
- Parameters
- ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
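
The hunk above re-introduces the `@timeout` stub at this position. A minimal usage sketch based on its docstring, which notes that the time units add up and that it combines with `@retry` (illustrative only, not part of the stub diff; the flow is hypothetical):

```python
# Illustrative only: @timeout combined with @retry as described in the docstring above.
from metaflow import FlowSpec, step, retry, timeout

class SlowStepFlow(FlowSpec):

    @retry(times=2)
    @timeout(hours=1, minutes=30)  # units are added: effective limit is 1h 30m
    @step
    def start(self):
        import time
        time.sleep(5)  # stand-in for a potentially long-running computation
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SlowStepFlow()
```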
687
428
 
@@ -816,35 +557,237 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
816
557
 
817
558
  Parameters
818
559
  ----------
819
- load_policy : str, default: "fresh"
820
- The policy for loading the checkpoint. The following policies are supported:
821
- - "eager": Loads the the latest available checkpoint within the namespace.
822
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
823
- will be loaded at the start of the task.
824
- - "none": Do not load any checkpoint
825
- - "fresh": Loads the lastest checkpoint created within the running Task.
826
- This mode helps loading checkpoints across various retry attempts of the same task.
827
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
828
- created within the task will be loaded when the task is retries execution on failure.
829
-
830
- temp_dir_root : str, default: None
831
- The root directory under which `current.checkpoint.directory` will be created.
560
+ load_policy : str, default: "fresh"
561
+ The policy for loading the checkpoint. The following policies are supported:
562
+ - "eager": Loads the the latest available checkpoint within the namespace.
563
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
564
+ will be loaded at the start of the task.
565
+ - "none": Do not load any checkpoint
566
+ - "fresh": Loads the lastest checkpoint created within the running Task.
567
+ This mode helps loading checkpoints across various retry attempts of the same task.
568
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
569
+ created within the task will be loaded when the task is retries execution on failure.
570
+
571
+ temp_dir_root : str, default: None
572
+ The root directory under which `current.checkpoint.directory` will be created.
573
+ """
574
+ ...
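A sketch of how the `load_policy` parameter above might be used; `current.checkpoint.directory` and `current.checkpoint.save()` are assumed from the checkpoint extension's runtime API and are not shown in this excerpt:

```python
import os
from metaflow import FlowSpec, step, checkpoint, current

class CheckpointDemoFlow(FlowSpec):

    # "fresh" reloads only checkpoints written by earlier retry attempts
    # of this same task, as described above.
    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        state_file = os.path.join(current.checkpoint.directory, "state.txt")
        with open(state_file, "w") as f:
            f.write("epoch=1")
        self.chkpt = current.checkpoint.save()  # assumed extension API
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CheckpointDemoFlow()
```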
575
+
576
+ @typing.overload
577
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
578
+ """
579
+ Specifies environment variables to be set prior to the execution of a step.
580
+
581
+
582
+ Parameters
583
+ ----------
584
+ vars : Dict[str, str], default {}
585
+ Dictionary of environment variables to set.
586
+ """
587
+ ...
588
+
589
+ @typing.overload
590
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
591
+ ...
592
+
593
+ @typing.overload
594
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
595
+ ...
596
+
597
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
598
+ """
599
+ Specifies environment variables to be set prior to the execution of a step.
600
+
601
+
602
+ Parameters
603
+ ----------
604
+ vars : Dict[str, str], default {}
605
+ Dictionary of environment variables to set.
606
+ """
607
+ ...
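A small sketch of `@environment`; the variable name is illustrative:

```python
import os
from metaflow import FlowSpec, step, environment

class EnvDemoFlow(FlowSpec):

    # The variable is set before the step body runs.
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        print(os.environ["TOKENIZERS_PARALLELISM"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvDemoFlow()
```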
608
+
609
+ @typing.overload
610
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
611
+ """
612
+ Internal decorator to support Fast bakery
613
+ """
614
+ ...
615
+
616
+ @typing.overload
617
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
618
+ ...
619
+
620
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
621
+ """
622
+ Internal decorator to support Fast bakery
623
+ """
624
+ ...
625
+
626
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
627
+ """
628
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
629
+
630
+ User code call
631
+ --------------
632
+ @vllm(
633
+ model="...",
634
+ ...
635
+ )
636
+
637
+ Valid backend options
638
+ ---------------------
639
+ - 'local': Run as a separate process on the local task machine.
640
+
641
+ Valid model options
642
+ -------------------
643
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
644
+
645
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
646
+ If you need multiple models, you must create multiple @vllm decorators.
647
+
648
+
649
+ Parameters
650
+ ----------
651
+ model: str
652
+ HuggingFace model identifier to be served by vLLM.
653
+ backend: str
654
+ Determines where and how to run the vLLM process.
655
+ openai_api_server: bool
656
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
657
+ Default is False (uses native engine).
658
+ Set to True for backward compatibility with existing code.
659
+ debug: bool
660
+ Whether to turn on verbose debugging logs.
661
+ card_refresh_interval: int
662
+ Interval in seconds for refreshing the vLLM status card.
663
+ Only used when openai_api_server=True.
664
+ max_retries: int
665
+ Maximum number of retries checking for vLLM server startup.
666
+ Only used when openai_api_server=True.
667
+ retry_alert_frequency: int
668
+ Frequency of alert logs for vLLM server startup retries.
669
+ Only used when openai_api_server=True.
670
+ engine_args : dict
671
+ Additional keyword arguments to pass to the vLLM engine.
672
+ For example, `tensor_parallel_size=2`.
673
+ """
674
+ ...
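A sketch of `@vllm` following the "User code call" shape above; only `model` and `backend` are passed, assuming the remaining parameters take the defaults described in this docstring, and the GPU-backed compute layer is an assumption:

```python
from metaflow import FlowSpec, step, kubernetes, vllm

class VllmDemoFlow(FlowSpec):

    # One @vllm decorator serves exactly one model, per the note above.
    @kubernetes(gpu=1)  # assumed GPU-backed compute layer
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local")
    @step
    def start(self):
        # The step body would query the sidecar here; the client handle is
        # not part of the excerpt above, so it is left out of this sketch.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VllmDemoFlow()
```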
675
+
676
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
677
+ """
678
+ Specifies that this step should execute on DGX cloud.
679
+
680
+
681
+ Parameters
682
+ ----------
683
+ gpu : int
684
+ Number of GPUs to use.
685
+ gpu_type : str
686
+ Type of Nvidia GPU to use.
687
+ """
688
+ ...
689
+
690
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
691
+ """
692
+ Specifies that this step should execute on DGX cloud.
693
+
694
+
695
+ Parameters
696
+ ----------
697
+ gpu : int
698
+ Number of GPUs to use.
699
+ gpu_type : str
700
+ Type of Nvidia GPU to use.
701
+ queue_timeout : int
702
+ Time to keep the job in NVCF's queue.
703
+ """
704
+ ...
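A sketch of `@nvidia`; the `gpu_type` and `queue_timeout` values are placeholders:

```python
from metaflow import FlowSpec, step, nvidia

class DgxDemoFlow(FlowSpec):

    # Request a single GPU on DGX cloud; values are illustrative.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        import subprocess
        subprocess.run(["nvidia-smi"], check=False)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DgxDemoFlow()
```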
705
+
706
+ @typing.overload
707
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
708
+ """
709
+ Creates a human-readable report, a Metaflow Card, after this step completes.
710
+
711
+ Note that you may add multiple `@card` decorators in a step with different parameters.
712
+
713
+
714
+ Parameters
715
+ ----------
716
+ type : str, default 'default'
717
+ Card type.
718
+ id : str, optional, default None
719
+ If multiple cards are present, use this id to identify this card.
720
+ options : Dict[str, Any], default {}
721
+ Options passed to the card. The contents depend on the card type.
722
+ timeout : int, default 45
723
+ Interrupt reporting if it takes more than this many seconds.
724
+ """
725
+ ...
726
+
727
+ @typing.overload
728
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
729
+ ...
730
+
731
+ @typing.overload
732
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
733
+ ...
734
+
735
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
736
+ """
737
+ Creates a human-readable report, a Metaflow Card, after this step completes.
738
+
739
+ Note that you may add multiple `@card` decorators in a step with different parameters.
740
+
741
+
742
+ Parameters
743
+ ----------
744
+ type : str, default 'default'
745
+ Card type.
746
+ id : str, optional, default None
747
+ If multiple cards are present, use this id to identify this card.
748
+ options : Dict[str, Any], default {}
749
+ Options passed to the card. The contents depend on the card type.
750
+ timeout : int, default 45
751
+ Interrupt reporting if it takes more than this many seconds.
752
+ """
753
+ ...
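A sketch of attaching two cards to one step, distinguished by `id` as permitted above; `Markdown` comes from `metaflow.cards`:

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):

    @card(type="default", id="metrics", timeout=45)
    @card(type="default", id="notes")
    @step
    def start(self):
        # Each card is addressed by its id at runtime.
        current.card["metrics"].append(Markdown("## accuracy: 0.93"))
        current.card["notes"].append(Markdown("Trained on sample data."))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardDemoFlow()
```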
754
+
755
+ @typing.overload
756
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
757
+ """
758
+ Specifies secrets to be retrieved and injected as environment variables prior to
759
+ the execution of a step.
760
+
761
+
762
+ Parameters
763
+ ----------
764
+ sources : List[Union[str, Dict[str, Any]]], default: []
765
+ List of secret specs, defining how the secrets are to be retrieved
766
+ role : str, optional, default: None
767
+ Role to use for fetching secrets
832
768
  """
833
769
  ...
834
770
 
835
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
771
+ @typing.overload
772
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
773
+ ...
774
+
775
+ @typing.overload
776
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
777
+ ...
778
+
779
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
836
780
  """
837
- Specifies that this step should execute on DGX cloud.
781
+ Specifies secrets to be retrieved and injected as environment variables prior to
782
+ the execution of a step.
838
783
 
839
784
 
840
785
  Parameters
841
786
  ----------
842
- gpu : int
843
- Number of GPUs to use.
844
- gpu_type : str
845
- Type of Nvidia GPU to use.
846
- queue_timeout : int
847
- Time to keep the job in NVCF's queue.
787
+ sources : List[Union[str, Dict[str, Any]]], default: []
788
+ List of secret specs, defining how the secrets are to be retrieved
789
+ role : str, optional, default: None
790
+ Role to use for fetching secrets
848
791
  """
849
792
  ...
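A sketch of `@secrets`; the secret name and the resulting environment variable are illustrative and depend on the configured secrets backend:

```python
import os
from metaflow import FlowSpec, step, secrets

class SecretsDemoFlow(FlowSpec):

    # "db-credentials" is a placeholder secret spec.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        print("DB_USER injected:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```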
850
793
 
@@ -903,62 +846,92 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
903
846
  """
904
847
  ...
905
848
 
906
- @typing.overload
907
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
849
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
908
850
  """
909
- Specifies the Conda environment for the step.
910
-
911
- Information in this decorator will augment any
912
- attributes set in the `@conda_base` flow-level decorator. Hence,
913
- you can use `@conda_base` to set packages required by all
914
- steps and use `@conda` to specify step-specific overrides.
851
+ Specifies that this step should execute on Kubernetes.
915
852
 
916
853
 
917
854
  Parameters
918
855
  ----------
919
- packages : Dict[str, str], default {}
920
- Packages to use for this step. The key is the name of the package
921
- and the value is the version to use.
922
- libraries : Dict[str, str], default {}
923
- Supported for backward compatibility. When used with packages, packages will take precedence.
924
- python : str, optional, default None
925
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
926
- that the version used will correspond to the version of the Python interpreter used to start the run.
927
- disabled : bool, default False
928
- If set to True, disables @conda.
929
- """
930
- ...
931
-
932
- @typing.overload
933
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
934
- ...
935
-
936
- @typing.overload
937
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
938
- ...
939
-
940
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
941
- """
942
- Specifies the Conda environment for the step.
943
-
944
- Information in this decorator will augment any
945
- attributes set in the `@conda_base` flow-level decorator. Hence,
946
- you can use `@conda_base` to set packages required by all
947
- steps and use `@conda` to specify step-specific overrides.
948
-
856
+ cpu : int, default 1
857
+ Number of CPUs required for this step. If `@resources` is
858
+ also present, the maximum value from all decorators is used.
859
+ memory : int, default 4096
860
+ Memory size (in MB) required for this step. If
861
+ `@resources` is also present, the maximum value from all decorators is
862
+ used.
863
+ disk : int, default 10240
864
+ Disk size (in MB) required for this step. If
865
+ `@resources` is also present, the maximum value from all decorators is
866
+ used.
867
+ image : str, optional, default None
868
+ Docker image to use when launching on Kubernetes. If not specified, and
869
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
870
+ not, a default Docker image mapping to the current version of Python is used.
871
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
872
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
873
+ image_pull_secrets: List[str], default []
874
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
875
+ Kubernetes image pull secrets to use when pulling container images
876
+ in Kubernetes.
877
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
878
+ Kubernetes service account to use when launching pod in Kubernetes.
879
+ secrets : List[str], optional, default None
880
+ Kubernetes secrets to use when launching pod in Kubernetes. These
881
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
882
+ in Metaflow configuration.
883
+ node_selector: Union[Dict[str,str], str], optional, default None
884
+ Kubernetes node selector(s) to apply to the pod running the task.
885
+ Can be passed in as a comma separated string of values e.g.
886
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
887
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
888
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
889
+ Kubernetes namespace to use when launching pod in Kubernetes.
890
+ gpu : int, optional, default None
891
+ Number of GPUs required for this step. A value of zero implies that
892
+ the scheduled node should not have GPUs.
893
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
894
+ The vendor of the GPUs to be used for this step.
895
+ tolerations : List[str], default []
896
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
897
+ Kubernetes tolerations to use when launching pod in Kubernetes.
898
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
899
+ Kubernetes labels to use when launching pod in Kubernetes.
900
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
901
+ Kubernetes annotations to use when launching pod in Kubernetes.
902
+ use_tmpfs : bool, default False
903
+ This enables an explicit tmpfs mount for this step.
904
+ tmpfs_tempdir : bool, default True
905
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
906
+ tmpfs_size : int, optional, default: None
907
+ The value for the size (in MiB) of the tmpfs mount for this step.
908
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
909
+ memory allocated for this step.
910
+ tmpfs_path : str, optional, default /metaflow_temp
911
+ Path to tmpfs mount for this step.
912
+ persistent_volume_claims : Dict[str, str], optional, default None
913
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
914
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
915
+ shared_memory: int, optional
916
+ Shared memory size (in MiB) required for this step
917
+ port: int, optional
918
+ Port number to specify in the Kubernetes job object
919
+ compute_pool : str, optional, default None
920
+ Compute pool to be used for this step.
921
+ If not specified, any accessible compute pool within the perimeter is used.
922
+ hostname_resolution_timeout: int, default 10 * 60
923
+ Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
924
+ Only applicable when @parallel is used.
925
+ qos: str, default: Burstable
926
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
949
927
 
950
- Parameters
951
- ----------
952
- packages : Dict[str, str], default {}
953
- Packages to use for this step. The key is the name of the package
954
- and the value is the version to use.
955
- libraries : Dict[str, str], default {}
956
- Supported for backward compatibility. When used with packages, packages will take precedence.
957
- python : str, optional, default None
958
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
959
- that the version used will correspond to the version of the Python interpreter used to start the run.
960
- disabled : bool, default False
961
- If set to True, disables @conda.
928
+ security_context: Dict[str, Any], optional, default None
929
+ Container security context. Applies to the task container. Allows the following keys:
930
+ - privileged: bool, optional, default None
931
+ - allow_privilege_escalation: bool, optional, default None
932
+ - run_as_user: int, optional, default None
933
+ - run_as_group: int, optional, default None
934
+ - run_as_non_root: bool, optional, default None
962
935
  """
963
936
  ...
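A sketch combining `@resources` with `@kubernetes` (the maximum of each value wins, as noted above); the image, node selector, and tmpfs settings are illustrative:

```python
from metaflow import FlowSpec, step, kubernetes, resources

class K8sDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8192)
    @kubernetes(
        cpu=1,
        memory=4096,
        image="python:3.11-slim",
        node_selector={"kubernetes.io/arch": "amd64"},
        use_tmpfs=True,
        tmpfs_size=1024,
    )
    @step
    def start(self):
        # Runs in a pod with max(cpu) = 2 and max(memory) = 8192 MB.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```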
964
937
 
@@ -1043,221 +1016,251 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
1043
1016
  ...
1044
1017
 
1045
1018
  @typing.overload
1046
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1047
- """
1048
- Specifies the resources needed when executing this step.
1049
-
1050
- Use `@resources` to specify the resource requirements
1051
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1052
-
1053
- You can choose the compute layer on the command line by executing e.g.
1054
- ```
1055
- python myflow.py run --with batch
1056
- ```
1057
- or
1058
- ```
1059
- python myflow.py run --with kubernetes
1060
- ```
1061
- which executes the flow on the desired system using the
1062
- requirements specified in `@resources`.
1063
-
1064
-
1065
- Parameters
1066
- ----------
1067
- cpu : int, default 1
1068
- Number of CPUs required for this step.
1069
- gpu : int, optional, default None
1070
- Number of GPUs required for this step.
1071
- disk : int, optional, default None
1072
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1073
- memory : int, default 4096
1074
- Memory size (in MB) required for this step.
1075
- shared_memory : int, optional, default None
1076
- The value for the size (in MiB) of the /dev/shm volume for this step.
1077
- This parameter maps to the `--shm-size` option in Docker.
1078
- """
1079
- ...
1080
-
1081
- @typing.overload
1082
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1083
- ...
1084
-
1085
- @typing.overload
1086
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1087
- ...
1088
-
1089
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1090
- """
1091
- Specifies the resources needed when executing this step.
1092
-
1093
- Use `@resources` to specify the resource requirements
1094
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1095
-
1096
- You can choose the compute layer on the command line by executing e.g.
1097
- ```
1098
- python myflow.py run --with batch
1099
- ```
1100
- or
1101
- ```
1102
- python myflow.py run --with kubernetes
1103
- ```
1104
- which executes the flow on the desired system using the
1105
- requirements specified in `@resources`.
1106
-
1107
-
1108
- Parameters
1109
- ----------
1110
- cpu : int, default 1
1111
- Number of CPUs required for this step.
1112
- gpu : int, optional, default None
1113
- Number of GPUs required for this step.
1114
- disk : int, optional, default None
1115
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1116
- memory : int, default 4096
1117
- Memory size (in MB) required for this step.
1118
- shared_memory : int, optional, default None
1119
- The value for the size (in MiB) of the /dev/shm volume for this step.
1120
- This parameter maps to the `--shm-size` option in Docker.
1121
- """
1122
- ...
1123
-
1124
- @typing.overload
1125
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1019
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1126
1020
  """
1127
- Specifies that the step will succeed under all circumstances.
1021
+ Specifies the Conda environment for the step.
1128
1022
 
1129
- The decorator will create an optional artifact, specified by `var`, which
1130
- contains the exception raised. You can use it to detect the presence
1131
- of errors, indicating that all happy-path artifacts produced by the step
1132
- are missing.
1023
+ Information in this decorator will augment any
1024
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1025
+ you can use `@conda_base` to set packages required by all
1026
+ steps and use `@conda` to specify step-specific overrides.
1133
1027
 
1134
1028
 
1135
1029
  Parameters
1136
1030
  ----------
1137
- var : str, optional, default None
1138
- Name of the artifact in which to store the caught exception.
1139
- If not specified, the exception is not stored.
1140
- print_exception : bool, default True
1141
- Determines whether or not the exception is printed to
1142
- stdout when caught.
1031
+ packages : Dict[str, str], default {}
1032
+ Packages to use for this step. The key is the name of the package
1033
+ and the value is the version to use.
1034
+ libraries : Dict[str, str], default {}
1035
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1036
+ python : str, optional, default None
1037
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1038
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1039
+ disabled : bool, default False
1040
+ If set to True, disables @conda.
1143
1041
  """
1144
1042
  ...
1145
1043
 
1146
1044
  @typing.overload
1147
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1045
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1148
1046
  ...
1149
1047
 
1150
1048
  @typing.overload
1151
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1049
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1152
1050
  ...
1153
1051
 
1154
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1052
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1155
1053
  """
1156
- Specifies that the step will succeed under all circumstances.
1054
+ Specifies the Conda environment for the step.
1157
1055
 
1158
- The decorator will create an optional artifact, specified by `var`, which
1159
- contains the exception raised. You can use it to detect the presence
1160
- of errors, indicating that all happy-path artifacts produced by the step
1161
- are missing.
1056
+ Information in this decorator will augment any
1057
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1058
+ you can use `@conda_base` to set packages required by all
1059
+ steps and use `@conda` to specify step-specific overrides.
1162
1060
 
1163
1061
 
1164
1062
  Parameters
1165
1063
  ----------
1166
- var : str, optional, default None
1167
- Name of the artifact in which to store the caught exception.
1168
- If not specified, the exception is not stored.
1169
- print_exception : bool, default True
1170
- Determines whether or not the exception is printed to
1171
- stdout when caught.
1064
+ packages : Dict[str, str], default {}
1065
+ Packages to use for this step. The key is the name of the package
1066
+ and the value is the version to use.
1067
+ libraries : Dict[str, str], default {}
1068
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1069
+ python : str, optional, default None
1070
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1071
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1072
+ disabled : bool, default False
1073
+ If set to True, disables @conda.
1172
1074
  """
1173
1075
  ...
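A sketch of step-level `@conda` augmenting a flow-level `@conda_base`; the package versions are illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})
class CondaDemoFlow(FlowSpec):

    # The step-level decorator adds scikit-learn on top of the base env.
    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def start(self):
        import numpy
        import sklearn
        print(numpy.__version__, sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```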
1174
1076
 
1175
1077
  @typing.overload
1176
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1078
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1177
1079
  """
1178
- Specifies environment variables to be set prior to the execution of a step.
1080
+ Specifies the PyPI packages for the step.
1081
+
1082
+ Information in this decorator will augment any
1083
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1084
+ you can use `@pypi_base` to set packages required by all
1085
+ steps and use `@pypi` to specify step-specific overrides.
1179
1086
 
1180
1087
 
1181
1088
  Parameters
1182
1089
  ----------
1183
- vars : Dict[str, str], default {}
1184
- Dictionary of environment variables to set.
1090
+ packages : Dict[str, str], default: {}
1091
+ Packages to use for this step. The key is the name of the package
1092
+ and the value is the version to use.
1093
+ python : str, optional, default: None
1094
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1095
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1185
1096
  """
1186
1097
  ...
1187
1098
 
1188
1099
  @typing.overload
1189
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1100
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1190
1101
  ...
1191
1102
 
1192
1103
  @typing.overload
1193
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1104
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1194
1105
  ...
1195
1106
 
1196
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1107
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1197
1108
  """
1198
- Specifies environment variables to be set prior to the execution of a step.
1109
+ Specifies the PyPI packages for the step.
1110
+
1111
+ Information in this decorator will augment any
1112
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1113
+ you can use `@pypi_base` to set packages required by all
1114
+ steps and use `@pypi` to specify step-specific overrides.
1199
1115
 
1200
1116
 
1201
1117
  Parameters
1202
1118
  ----------
1203
- vars : Dict[str, str], default {}
1204
- Dictionary of environment variables to set.
1119
+ packages : Dict[str, str], default: {}
1120
+ Packages to use for this step. The key is the name of the package
1121
+ and the value is the version to use.
1122
+ python : str, optional, default: None
1123
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1124
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1205
1125
  """
1206
1126
  ...
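The same pattern with `@pypi` / `@pypi_base`; versions are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.10.12", packages={"requests": "2.31.0"})
class PypiDemoFlow(FlowSpec):

    # Step-specific override on top of the flow-level base packages.
    @pypi(packages={"pandas": "2.2.0"})
    @step
    def start(self):
        import pandas
        import requests
        print(pandas.__version__, requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```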
1207
1127
 
1208
1128
  @typing.overload
1209
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1129
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1210
1130
  """
1211
- Creates a human-readable report, a Metaflow Card, after this step completes.
1131
+ Enables loading / saving of models within a step.
1212
1132
 
1213
- Note that you may add multiple `@card` decorators in a step with different parameters.
1133
+ > Examples
1134
+ - Saving Models
1135
+ ```python
1136
+ @model
1137
+ @step
1138
+ def train(self):
1139
+ # current.model.save returns a dictionary reference to the model saved
1140
+ self.my_model = current.model.save(
1141
+ path_to_my_model,
1142
+ label="my_model",
1143
+ metadata={
1144
+ "epochs": 10,
1145
+ "batch-size": 32,
1146
+ "learning-rate": 0.001,
1147
+ }
1148
+ )
1149
+ self.next(self.test)
1150
+
1151
+ @model(load="my_model")
1152
+ @step
1153
+ def test(self):
1154
+ # `current.model.loaded` returns a dictionary of the loaded models
1155
+ # where the key is the name of the artifact and the value is the path to the model
1156
+ print(os.listdir(current.model.loaded["my_model"]))
1157
+ self.next(self.end)
1158
+ ```
1159
+
1160
+ - Loading models
1161
+ ```python
1162
+ @step
1163
+ def train(self):
1164
+ # current.model.load returns the path to the model loaded
1165
+ checkpoint_path = current.model.load(
1166
+ self.checkpoint_key,
1167
+ )
1168
+ model_path = current.model.load(
1169
+ self.model,
1170
+ )
1171
+ self.next(self.test)
1172
+ ```
1214
1173
 
1215
1174
 
1216
1175
  Parameters
1217
1176
  ----------
1218
- type : str, default 'default'
1219
- Card type.
1220
- id : str, optional, default None
1221
- If multiple cards are present, use this id to identify this card.
1222
- options : Dict[str, Any], default {}
1223
- Options passed to the card. The contents depend on the card type.
1224
- timeout : int, default 45
1225
- Interrupt reporting if it takes more than this many seconds.
1177
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1178
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1179
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1180
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
1181
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1182
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1183
+
1184
+ temp_dir_root : str, default: None
1185
+ The root directory under which `current.model.loaded` will store loaded models
1226
1186
  """
1227
1187
  ...
1228
1188
 
1229
1189
  @typing.overload
1230
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1190
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1231
1191
  ...
1232
1192
 
1233
1193
  @typing.overload
1234
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1194
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1235
1195
  ...
1236
1196
 
1237
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1197
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1238
1198
  """
1239
- Creates a human-readable report, a Metaflow Card, after this step completes.
1199
+ Enables loading / saving of models within a step.
1240
1200
 
1241
- Note that you may add multiple `@card` decorators in a step with different parameters.
1201
+ > Examples
1202
+ - Saving Models
1203
+ ```python
1204
+ @model
1205
+ @step
1206
+ def train(self):
1207
+ # current.model.save returns a dictionary reference to the model saved
1208
+ self.my_model = current.model.save(
1209
+ path_to_my_model,
1210
+ label="my_model",
1211
+ metadata={
1212
+ "epochs": 10,
1213
+ "batch-size": 32,
1214
+ "learning-rate": 0.001,
1215
+ }
1216
+ )
1217
+ self.next(self.test)
1218
+
1219
+ @model(load="my_model")
1220
+ @step
1221
+ def test(self):
1222
+ # `current.model.loaded` returns a dictionary of the loaded models
1223
+ # where the key is the name of the artifact and the value is the path to the model
1224
+ print(os.listdir(current.model.loaded["my_model"]))
1225
+ self.next(self.end)
1226
+ ```
1227
+
1228
+ - Loading models
1229
+ ```python
1230
+ @step
1231
+ def train(self):
1232
+ # current.model.load returns the path to the model loaded
1233
+ checkpoint_path = current.model.load(
1234
+ self.checkpoint_key,
1235
+ )
1236
+ model_path = current.model.load(
1237
+ self.model,
1238
+ )
1239
+ self.next(self.test)
1240
+ ```
1242
1241
 
1243
1242
 
1244
1243
  Parameters
1245
1244
  ----------
1246
- type : str, default 'default'
1247
- Card type.
1248
- id : str, optional, default None
1249
- If multiple cards are present, use this id to identify this card.
1250
- options : Dict[str, Any], default {}
1251
- Options passed to the card. The contents depend on the card type.
1252
- timeout : int, default 45
1253
- Interrupt reporting if it takes more than this many seconds.
1245
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1246
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1247
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1248
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
1249
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1250
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1251
+
1252
+ temp_dir_root : str, default: None
1253
+ The root directory under which `current.model.loaded` will store loaded models
1254
1254
  """
1255
1255
  ...
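A self-contained version of the save/load fragments shown in the docstring above; the on-disk "model" is a placeholder file:

```python
import os
from metaflow import FlowSpec, current, model, step

class ModelDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # Write a placeholder model directory and register it on self.
        os.makedirs("model_dir", exist_ok=True)
        with open(os.path.join("model_dir", "weights.bin"), "wb") as f:
            f.write(b"\x00" * 16)
        self.my_model = current.model.save("model_dir", label="my_model")
        self.next(self.end)

    @model(load="my_model")
    @step
    def end(self):
        # current.model.loaded maps artifact names to local paths.
        print(os.listdir(current.model.loaded["my_model"]))

if __name__ == "__main__":
    ModelDemoFlow()
```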
1256
1256
 
1257
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1257
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1258
1258
  """
1259
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1260
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1259
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1260
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1261
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1262
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1263
+ starts only after all sensors finish.
1261
1264
 
1262
1265
 
1263
1266
  Parameters
@@ -1269,31 +1272,28 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1269
1272
  mode : str
1270
1273
  How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1271
1274
  exponential_backoff : bool
1272
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1273
- pool : str
1274
- the slot pool this task should run in,
1275
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1276
- soft_fail : bool
1277
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1278
- name : str
1279
- Name of the sensor on Airflow
1280
- description : str
1281
- Description of sensor in the Airflow UI
1282
- external_dag_id : str
1283
- The dag_id that contains the task you want to wait for.
1284
- external_task_ids : List[str]
1285
- The list of task_ids that you want to wait for.
1286
- If None (default value) the sensor waits for the DAG. (Default: None)
1287
- allowed_states : List[str]
1288
- Iterable of allowed states, (Default: ['success'])
1289
- failed_states : List[str]
1290
- Iterable of failed or dis-allowed states. (Default: None)
1291
- execution_delta : datetime.timedelta
1292
- time difference with the previous execution to look at,
1293
- the default is the same logical date as the current task or DAG. (Default: None)
1294
- check_existence: bool
1295
- Set to True to check if the external task exists or check if
1296
- the DAG to wait for exists. (Default: True)
1275
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1276
+ pool : str
1277
+ the slot pool this task should run in,
1278
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1279
+ soft_fail : bool
1280
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1281
+ name : str
1282
+ Name of the sensor on Airflow
1283
+ description : str
1284
+ Description of sensor in the Airflow UI
1285
+ bucket_key : Union[str, List[str]]
1286
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1287
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1288
+ bucket_name : str
1289
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1290
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1291
+ wildcard_match : bool
1292
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1293
+ aws_conn_id : str
1294
+ a reference to the s3 connection on Airflow. (Default: None)
1295
+ verify : bool
1296
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1297
1297
  """
1298
1298
  ...
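A sketch of the flow-level sensor; every documented parameter is passed explicitly since the stub does not show defaults, and the bucket key is illustrative. The flow would then be compiled with `airflow create`, as noted above:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_input",
    description="Wait for the daily input file to land in S3",
    bucket_key="s3://example-bucket/input/ready.flag",
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorDemoFlow()
```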
1299
1299
 
@@ -1411,41 +1411,6 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1411
1411
  """
1412
1412
  ...
1413
1413
 
1414
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1415
- """
1416
- Specifies what flows belong to the same project.
1417
-
1418
- A project-specific namespace is created for all flows that
1419
- use the same `@project(name)`.
1420
-
1421
-
1422
- Parameters
1423
- ----------
1424
- name : str
1425
- Project name. Make sure that the name is unique amongst all
1426
- projects that use the same production scheduler. The name may
1427
- contain only lowercase alphanumeric characters and underscores.
1428
-
1429
- branch : Optional[str], default None
1430
- The branch to use. If not specified, the branch is set to
1431
- `user.<username>` unless `production` is set to `True`. This can
1432
- also be set on the command line using `--branch` as a top-level option.
1433
- It is an error to specify `branch` in the decorator and on the command line.
1434
-
1435
- production : bool, default False
1436
- Whether or not the branch is the production branch. This can also be set on the
1437
- command line using `--production` as a top-level option. It is an error to specify
1438
- `production` in the decorator and on the command line.
1439
- The project branch name will be:
1440
- - if `branch` is specified:
1441
- - if `production` is True: `prod.<branch>`
1442
- - if `production` is False: `test.<branch>`
1443
- - if `branch` is not specified:
1444
- - if `production` is True: `prod`
1445
- - if `production` is False: `user.<username>`
1446
- """
1447
- ...
1448
-
1449
1414
  @typing.overload
1450
1415
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1451
1416
  """
@@ -1464,221 +1429,36 @@ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[s
1464
1429
  Supported for backward compatibility. When used with packages, packages will take precedence.
1465
1430
  python : str, optional, default None
1466
1431
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1467
- that the version used will correspond to the version of the Python interpreter used to start the run.
1468
- disabled : bool, default False
1469
- If set to True, disables Conda.
1470
- """
1471
- ...
1472
-
1473
- @typing.overload
1474
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1475
- ...
1476
-
1477
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1478
- """
1479
- Specifies the Conda environment for all steps of the flow.
1480
-
1481
- Use `@conda_base` to set common libraries required by all
1482
- steps and use `@conda` to specify step-specific additions.
1483
-
1484
-
1485
- Parameters
1486
- ----------
1487
- packages : Dict[str, str], default {}
1488
- Packages to use for this flow. The key is the name of the package
1489
- and the value is the version to use.
1490
- libraries : Dict[str, str], default {}
1491
- Supported for backward compatibility. When used with packages, packages will take precedence.
1492
- python : str, optional, default None
1493
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1494
- that the version used will correspond to the version of the Python interpreter used to start the run.
1495
- disabled : bool, default False
1496
- If set to True, disables Conda.
1497
- """
1498
- ...
1499
-
1500
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1501
- """
1502
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1503
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1504
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1505
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1506
- starts only after all sensors finish.
1507
-
1508
-
1509
- Parameters
1510
- ----------
1511
- timeout : int
1512
- Time, in seconds before the task times out and fails. (Default: 3600)
1513
- poke_interval : int
1514
- Time in seconds that the job should wait in between each try. (Default: 60)
1515
- mode : str
1516
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1517
- exponential_backoff : bool
1518
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1519
- pool : str
1520
- the slot pool this task should run in,
1521
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1522
- soft_fail : bool
1523
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1524
- name : str
1525
- Name of the sensor on Airflow
1526
- description : str
1527
- Description of sensor in the Airflow UI
1528
- bucket_key : Union[str, List[str]]
1529
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1530
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1531
- bucket_name : str
1532
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1533
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1534
- wildcard_match : bool
1535
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1536
- aws_conn_id : str
1537
- a reference to the s3 connection on Airflow. (Default: None)
1538
- verify : bool
1539
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1540
- """
1541
- ...
1542
-
1543
- @typing.overload
1544
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1545
- """
1546
- Specifies the PyPI packages for all steps of the flow.
1547
-
1548
- Use `@pypi_base` to set common packages required by all
1549
- steps and use `@pypi` to specify step-specific overrides.
1550
-
1551
- Parameters
1552
- ----------
1553
- packages : Dict[str, str], default: {}
1554
- Packages to use for this flow. The key is the name of the package
1555
- and the value is the version to use.
1556
- python : str, optional, default: None
1557
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1558
- that the version used will correspond to the version of the Python interpreter used to start the run.
1559
- """
1560
- ...
1561
-
1562
- @typing.overload
1563
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1564
- ...
1565
-
1566
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1567
- """
1568
- Specifies the PyPI packages for all steps of the flow.
1569
-
1570
- Use `@pypi_base` to set common packages required by all
1571
- steps and use `@pypi` to specify step-specific overrides.
1572
-
1573
- Parameters
1574
- ----------
1575
- packages : Dict[str, str], default: {}
1576
- Packages to use for this flow. The key is the name of the package
1577
- and the value is the version to use.
1578
- python : str, optional, default: None
1579
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1580
- that the version used will correspond to the version of the Python interpreter used to start the run.
1581
- """
1582
- ...
1583
-
1584
- @typing.overload
1585
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1586
- """
1587
- Specifies the flow(s) that this flow depends on.
1588
-
1589
- ```
1590
- @trigger_on_finish(flow='FooFlow')
1591
- ```
1592
- or
1593
- ```
1594
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1595
- ```
1596
- This decorator respects the @project decorator and triggers the flow
1597
- when upstream runs within the same namespace complete successfully
1598
-
1599
- Additionally, you can specify project aware upstream flow dependencies
1600
- by specifying the fully qualified project_flow_name.
1601
- ```
1602
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1603
- ```
1604
- or
1605
- ```
1606
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1607
- ```
1608
-
1609
- You can also specify just the project or project branch (other values will be
1610
- inferred from the current project or project branch):
1611
- ```
1612
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1613
- ```
1614
-
1615
- Note that `branch` is typically one of:
1616
- - `prod`
1617
- - `user.bob`
1618
- - `test.my_experiment`
1619
- - `prod.staging`
1620
-
1621
-
1622
- Parameters
1623
- ----------
1624
- flow : Union[str, Dict[str, str]], optional, default None
1625
- Upstream flow dependency for this flow.
1626
- flows : List[Union[str, Dict[str, str]]], default []
1627
- Upstream flow dependencies for this flow.
1628
- options : Dict[str, Any], default {}
1629
- Backend-specific configuration for tuning eventing behavior.
1630
- """
1631
- ...
1632
-
1633
- @typing.overload
1634
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1635
- ...
1636
-
1637
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1638
- """
1639
- Specifies the flow(s) that this flow depends on.
1640
-
1641
- ```
1642
- @trigger_on_finish(flow='FooFlow')
1643
- ```
1644
- or
1645
- ```
1646
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1647
- ```
1648
- This decorator respects the @project decorator and triggers the flow
1649
- when upstream runs within the same namespace complete successfully
1650
-
1651
- Additionally, you can specify project aware upstream flow dependencies
1652
- by specifying the fully qualified project_flow_name.
1653
- ```
1654
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1655
- ```
1656
- or
1657
- ```
1658
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1659
- ```
1660
-
1661
- You can also specify just the project or project branch (other values will be
1662
- inferred from the current project or project branch):
1663
- ```
1664
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1665
- ```
1432
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1433
+ disabled : bool, default False
1434
+ If set to True, disables Conda.
1435
+ """
1436
+ ...
1437
+
1438
+ @typing.overload
1439
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1440
+ ...
1441
+
1442
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1443
+ """
1444
+ Specifies the Conda environment for all steps of the flow.
1666
1445
 
1667
- Note that `branch` is typically one of:
1668
- - `prod`
1669
- - `user.bob`
1670
- - `test.my_experiment`
1671
- - `prod.staging`
1446
+ Use `@conda_base` to set common libraries required by all
1447
+ steps and use `@conda` to specify step-specific additions.
1672
1448
 
1673
1449
 
1674
1450
  Parameters
1675
1451
  ----------
1676
- flow : Union[str, Dict[str, str]], optional, default None
1677
- Upstream flow dependency for this flow.
1678
- flows : List[Union[str, Dict[str, str]]], default []
1679
- Upstream flow dependencies for this flow.
1680
- options : Dict[str, Any], default {}
1681
- Backend-specific configuration for tuning eventing behavior.
1452
+ packages : Dict[str, str], default {}
1453
+ Packages to use for this flow. The key is the name of the package
1454
+ and the value is the version to use.
1455
+ libraries : Dict[str, str], default {}
1456
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1457
+ python : str, optional, default None
1458
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1459
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1460
+ disabled : bool, default False
1461
+ If set to True, disables Conda.
1682
1462
  """
1683
1463
  ...
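A minimal sketch of the flow-level/step-level split that the `@conda_base` docstring above describes; the package names and versions are illustrative, and the flow assumes Conda-based execution is enabled (typically via the `--environment=conda` option):

```
from metaflow import FlowSpec, conda, conda_base, step


# Versions are illustrative placeholders.
@conda_base(python='3.10.4', packages={'numpy': '1.26.4'})
class CondaExampleFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level environment
        self.total = int(np.arange(10).sum())
        self.next(self.report)

    # Step-specific addition on top of the flow-level environment.
    @conda(packages={'pandas': '2.2.2'})
    @step
    def report(self):
        import pandas as pd
        print(pd.DataFrame({'total': [self.total]}))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    CondaExampleFlow()
```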
1684
1464
 
@@ -1826,5 +1606,225 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1826
1606
  """
1827
1607
  ...
1828
1608
 
1609
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1610
+ """
1611
+ Specifies what flows belong to the same project.
1612
+
1613
+ A project-specific namespace is created for all flows that
1614
+ use the same `@project(name)`.
1615
+
1616
+
1617
+ Parameters
1618
+ ----------
1619
+ name : str
1620
+ Project name. Make sure that the name is unique amongst all
1621
+ projects that use the same production scheduler. The name may
1622
+ contain only lowercase alphanumeric characters and underscores.
1623
+
1624
+ branch : Optional[str], default None
1625
+ The branch to use. If not specified, the branch is set to
1626
+ `user.<username>` unless `production` is set to `True`. This can
1627
+ also be set on the command line using `--branch` as a top-level option.
1628
+ It is an error to specify `branch` in the decorator and on the command line.
1629
+
1630
+ production : bool, default False
1631
+ Whether or not the branch is the production branch. This can also be set on the
1632
+ command line using `--production` as a top-level option. It is an error to specify
1633
+ `production` in the decorator and on the command line.
1634
+ The project branch name will be:
1635
+ - if `branch` is specified:
1636
+ - if `production` is True: `prod.<branch>`
1637
+ - if `production` is False: `test.<branch>`
1638
+ - if `branch` is not specified:
1639
+ - if `production` is True: `prod`
1640
+ - if `production` is False: `user.<username>`
1641
+ """
1642
+ ...
1643
+
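A minimal sketch of a project-scoped flow, with `my_project` as a placeholder project name:

```
from metaflow import FlowSpec, project, step


@project(name='my_project')  # placeholder project name
class ProjectedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ProjectedFlow()
```

Following the branch rules above, running this flow with `--branch my_branch` places it under `test.my_branch`, with `--branch my_branch --production` under `prod.my_branch`, and with plain `--production` under `prod`.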
1644
+ @typing.overload
1645
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1646
+ """
1647
+ Specifies the flow(s) that this flow depends on.
1648
+
1649
+ ```
1650
+ @trigger_on_finish(flow='FooFlow')
1651
+ ```
1652
+ or
1653
+ ```
1654
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1655
+ ```
1656
+ This decorator respects the @project decorator and triggers the flow
1657
+ when upstream runs within the same namespace complete successfully.
1658
+
1659
+ Additionally, you can specify project-aware upstream flow dependencies
1660
+ by specifying the fully qualified project_flow_name.
1661
+ ```
1662
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1663
+ ```
1664
+ or
1665
+ ```
1666
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1667
+ ```
1668
+
1669
+ You can also specify just the project or project branch (other values will be
1670
+ inferred from the current project or project branch):
1671
+ ```
1672
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1673
+ ```
1674
+
1675
+ Note that `branch` is typically one of:
1676
+ - `prod`
1677
+ - `user.bob`
1678
+ - `test.my_experiment`
1679
+ - `prod.staging`
1680
+
1681
+
1682
+ Parameters
1683
+ ----------
1684
+ flow : Union[str, Dict[str, str]], optional, default None
1685
+ Upstream flow dependency for this flow.
1686
+ flows : List[Union[str, Dict[str, str]]], default []
1687
+ Upstream flow dependencies for this flow.
1688
+ options : Dict[str, Any], default {}
1689
+ Backend-specific configuration for tuning eventing behavior.
1690
+ """
1691
+ ...
1692
+
1693
+ @typing.overload
1694
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1695
+ ...
1696
+
1697
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1698
+ """
1699
+ Specifies the flow(s) that this flow depends on.
1700
+
1701
+ ```
1702
+ @trigger_on_finish(flow='FooFlow')
1703
+ ```
1704
+ or
1705
+ ```
1706
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1707
+ ```
1708
+ This decorator respects the @project decorator and triggers the flow
1709
+ when upstream runs within the same namespace complete successfully.
1710
+
1711
+ Additionally, you can specify project-aware upstream flow dependencies
1712
+ by specifying the fully qualified project_flow_name.
1713
+ ```
1714
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1715
+ ```
1716
+ or
1717
+ ```
1718
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1719
+ ```
1720
+
1721
+ You can also specify just the project or project branch (other values will be
1722
+ inferred from the current project or project branch):
1723
+ ```
1724
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1725
+ ```
1726
+
1727
+ Note that `branch` is typically one of:
1728
+ - `prod`
1729
+ - `user.bob`
1730
+ - `test.my_experiment`
1731
+ - `prod.staging`
1732
+
1733
+
1734
+ Parameters
1735
+ ----------
1736
+ flow : Union[str, Dict[str, str]], optional, default None
1737
+ Upstream flow dependency for this flow.
1738
+ flows : List[Union[str, Dict[str, str]]], default []
1739
+ Upstream flow dependencies for this flow.
1740
+ options : Dict[str, Any], default {}
1741
+ Backend-specific configuration for tuning eventing behavior.
1742
+ """
1743
+ ...
1744
+
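A minimal sketch combining `@project` with the dict form of `@trigger_on_finish` shown above; the project, branch, and upstream flow names are placeholders taken from the docstring:

```
from metaflow import FlowSpec, project, step, trigger_on_finish


# 'FooFlow', 'my_project' and 'branch' are placeholder values.
@project(name='my_project')
@trigger_on_finish(flow={'name': 'FooFlow',
                         'project': 'my_project',
                         'project_branch': 'branch'})
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    DownstreamFlow()
```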
1745
+ @typing.overload
1746
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1747
+ """
1748
+ Specifies the PyPI packages for all steps of the flow.
1749
+
1750
+ Use `@pypi_base` to set common packages required by all
1751
+ steps and use `@pypi` to specify step-specific overrides.
1752
+
1753
+ Parameters
1754
+ ----------
1755
+ packages : Dict[str, str], default: {}
1756
+ Packages to use for this flow. The key is the name of the package
1757
+ and the value is the version to use.
1758
+ python : str, optional, default: None
1759
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1760
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1761
+ """
1762
+ ...
1763
+
1764
+ @typing.overload
1765
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1766
+ ...
1767
+
1768
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1769
+ """
1770
+ Specifies the PyPI packages for all steps of the flow.
1771
+
1772
+ Use `@pypi_base` to set common packages required by all
1773
+ steps and use `@pypi` to specify step-specific overrides.
1774
+
1775
+ Parameters
1776
+ ----------
1777
+ packages : Dict[str, str], default: {}
1778
+ Packages to use for this flow. The key is the name of the package
1779
+ and the value is the version to use.
1780
+ python : str, optional, default: None
1781
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1782
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1783
+ """
1784
+ ...
1785
+
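A minimal sketch of the `@pypi_base`/`@pypi` split documented above; the package versions are illustrative, and the flow assumes PyPI-based execution is enabled (typically via the `--environment=pypi` option):

```
from metaflow import FlowSpec, pypi, pypi_base, step


# Versions are illustrative placeholders.
@pypi_base(python='3.11.5', packages={'requests': '2.31.0'})
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import requests  # available in every step via @pypi_base
        self.requests_version = requests.__version__
        self.next(self.end)

    # Step-specific override on top of the flow-level packages.
    @pypi(packages={'pandas': '2.2.2'})
    @step
    def end(self):
        import pandas as pd
        print(pd.DataFrame({'requests': [self.requests_version]}))


if __name__ == '__main__':
    PypiExampleFlow()
```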
1786
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1787
+ """
1788
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the `start` step of the flow.
1789
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
1790
+
1791
+
1792
+ Parameters
1793
+ ----------
1794
+ timeout : int
1795
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1796
+ poke_interval : int
1797
+ Time in seconds that the job should wait in between each try. (Default: 60)
1798
+ mode : str
1799
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1800
+ exponential_backoff : bool
1801
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1802
+ pool : str
1803
+ The slot pool this task should run in;
1804
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1805
+ soft_fail : bool
1806
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1807
+ name : str
1808
+ Name of the sensor on Airflow
1809
+ description : str
1810
+ Description of the sensor in the Airflow UI.
1811
+ external_dag_id : str
1812
+ The dag_id that contains the task you want to wait for.
1813
+ external_task_ids : List[str]
1814
+ The list of task_ids that you want to wait for.
1815
+ If None (default value) the sensor waits for the DAG. (Default: None)
1816
+ allowed_states : List[str]
1817
+ Iterable of allowed states. (Default: ['success'])
1818
+ failed_states : List[str]
1819
+ Iterable of failed or disallowed states. (Default: None)
1820
+ execution_delta : datetime.timedelta
1821
+ Time difference with the previous execution to look at;
1822
+ the default is the same logical date as the current task or DAG. (Default: None)
1823
+ check_existence : bool
1824
+ Set to True to check if the external task exists or check if
1825
+ the DAG to wait for exists. (Default: True)
1826
+ """
1827
+ ...
1828
+
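A minimal sketch of the sensor decorator documented above. The DAG id `upstream_etl` and task id `export_table` are hypothetical, `default_pool` is Airflow's standard default pool, the remaining arguments mirror the documented defaults, and the decorator only has an effect when the flow is compiled with `airflow create`:

```
from datetime import timedelta

from metaflow import FlowSpec, airflow_external_task_sensor, step


# 'upstream_etl' and 'export_table' are hypothetical Airflow DAG/task ids.
@airflow_external_task_sensor(
    name='wait_for_upstream_etl',
    description='Block start until upstream_etl.export_table succeeds',
    external_dag_id='upstream_etl',
    external_task_ids=['export_table'],
    timeout=3600,               # documented default
    poke_interval=60,           # documented default
    mode='reschedule',          # free the worker slot between pokes
    exponential_backoff=True,
    pool='default_pool',        # Airflow's standard default pool
    soft_fail=False,
    allowed_states=['success'],
    failed_states=['failed'],
    execution_delta=timedelta(hours=1),  # upstream DAG runs one hour earlier (illustrative)
    check_existence=True,
)
class AirflowSensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    AirflowSensorFlow()
```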
1829
1829
  pkg_name: str
1830
1830