ob-metaflow-stubs 6.0.3.179rc1__py2.py3-none-any.whl → 6.0.3.179rc2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +760 -760
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +86 -86
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +1 -1
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +2 -2
  95. metaflow-stubs/plugins/__init__.pyi +9 -9
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +1 -1
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  202. metaflow-stubs/runner/metaflow_runner.pyi +1 -1
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +1 -1
  215. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  216. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.179rc2.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.179rc1.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-13T20:01:40.043002 #
+ # Generated on 2025-06-13T20:28:05.258106 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
@@ -44,9 +44,9 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -156,19 +156,108 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Internal decorator to support Fast bakery
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
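For orientation, a minimal sketch of how the relocated `@card` stub above is typically applied to a step; the flow name, card ids, and artifact are illustrative and not part of this package.

```python
# Illustrative only, not part of the stub diff: two cards with different ids
# attached to one step, matching the signature shown above.
from metaflow import FlowSpec, card, step

class CardExampleFlow(FlowSpec):

    @card(type="default", id="summary", timeout=45)   # rendered after the step completes
    @card(type="blank", id="details")                  # a second card on the same step
    @step
    def start(self):
        self.value = 42  # artifact picked up by the default card
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardExampleFlow()
```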
+ @typing.overload
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ """
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

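A hedged sketch of the `@checkpoint` stub above, assuming the Outerbounds checkpoint extension is installed; only `current.checkpoint.directory` comes from the docstring, and the `current.checkpoint.save()` call is an assumption about the extension's API rather than something shown in this diff.

```python
# Illustrative only: "fresh" reloads checkpoints written by earlier attempts
# of the same task, per the load_policy description above.
import os
from metaflow import FlowSpec, checkpoint, current, retry, step

class CheckpointExampleFlow(FlowSpec):

    @retry(times=2)
    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        # current.checkpoint.directory is created under temp_dir_root (see stub above)
        state_file = os.path.join(current.checkpoint.directory, "state.txt")
        with open(state_file, "w") as f:
            f.write("progress marker")
        # Persisting the directory via current.checkpoint.save() is assumed here,
        # not documented in this diff.
        current.checkpoint.save()
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CheckpointExampleFlow()
```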
@@ -231,199 +320,242 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies that this step should execute on DGX cloud.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

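A minimal sketch of the `@environment` stub above (the `@nvct` stub takes `gpu`/`gpu_type` in the same spirit); the variable name is illustrative.

```python
# Illustrative only: setting a step-scoped environment variable with the
# @environment decorator whose stub moved in this diff.
import os
from metaflow import FlowSpec, environment, step

class EnvExampleFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        print(os.environ["TOKENIZERS_PARALLELISM"])  # set before the step body runs
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvExampleFlow()
```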
  @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Enables checkpointing for a step.
-
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Enables checkpointing for a step.
-
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the lastest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task is retries execution on failure.
-
- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

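A minimal sketch of the `@secrets` stub above; the secret source id and the resulting environment variable are hypothetical.

```python
# Illustrative only: each key of the resolved secret is injected as an
# environment variable before the step body runs.
import os
from metaflow import FlowSpec, secrets, step

class SecretsExampleFlow(FlowSpec):

    @secrets(sources=["my-db-credentials"])  # hypothetical secret spec
    @step
    def start(self):
        print("db user present:", "DB_USER" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsExampleFlow()
```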
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

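A minimal sketch of the `@pypi` stub above combined with a flow-level `@pypi_base`, as its docstring describes; the package pins are examples.

```python
# Illustrative only: flow-level @pypi_base with a step-level @pypi override.
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})
class PypiExampleFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiExampleFlow()
```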
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort

- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

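A minimal sketch using a few of the `@kubernetes` parameters documented above; the resource values and node selector are examples.

```python
# Illustrative only: one @kubernetes step with example resource settings.
from metaflow import FlowSpec, kubernetes, step

class K8sExampleFlow(FlowSpec):

    @kubernetes(
        cpu=2,
        memory=8192,   # MB
        disk=20480,    # MB
        node_selector="kubernetes.io/arch=amd64",
        use_tmpfs=True,
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sExampleFlow()
```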
@@ -484,497 +616,466 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+ Parameters
+ ----------
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

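A minimal sketch of the `@ollama` stub above with the 'local' backend; the model name is an example from ollama.com, and omitted parameters are assumed to fall back to the decorator's own defaults, which this stub does not spell out.

```python
# Illustrative only: running a local Ollama sidecar for one step.
from metaflow import FlowSpec, ollama, step

class OllamaExampleFlow(FlowSpec):

    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The sidecar serves the listed models for the duration of this step;
        # client code that calls the local Ollama server is omitted here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaExampleFlow()
```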
  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

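A minimal sketch of the `@conda` stub above together with a flow-level `@conda_base`; the pinned versions are examples.

```python
# Illustrative only: flow-level @conda_base with a step-level @conda override.
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.11.5")
class CondaExampleFlow(FlowSpec):

    @conda(packages={"numpy": "1.26.4"})
    @step
    def start(self):
        import numpy as np
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaExampleFlow()
```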
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ cpu : int, default 1
787
+ Number of CPUs required for this step.
788
+ gpu : int, optional, default None
789
+ Number of GPUs required for this step.
790
+ disk : int, optional, default None
791
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
792
+ memory : int, default 4096
793
+ Memory size (in MB) required for this step.
794
+ shared_memory : int, optional, default None
795
+ The value for the size (in MiB) of the /dev/shm volume for this step.
796
+ This parameter maps to the `--shm-size` option in Docker.
601
797
  """
602
798
  ...
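
To make the `@resources` contract above concrete, a minimal sketch follows; the flow name, file name, and the specific CPU/memory/GPU numbers are illustrative assumptions.

```python
from metaflow import FlowSpec, resources, step


class ResourceDemoFlow(FlowSpec):

    # Declarative requirements; the compute layer is chosen at run time,
    # e.g. `python resource_demo.py run --with kubernetes`.
    @resources(cpu=4, memory=16384, gpu=1)
    @step
    def start(self):
        print("requested 4 CPUs, 16 GB of memory and 1 GPU")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourceDemoFlow()
```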
603
799
 
604
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
800
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
605
801
  """
606
- Specifies that this step is used to deploy an instance of the app.
607
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
802
+ Decorator that helps cache, version, and store models/datasets from Hugging Face Hub.
608
803
 
609
804
 
610
805
  Parameters
611
806
  ----------
612
- app_port : int
613
- Number of GPUs to use.
614
- app_name : str
615
- Name of the app to deploy.
807
+ temp_dir_root : str, optional
808
+ The root directory that will hold the temporary directory where objects will be downloaded.
809
+
810
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
811
+ The list of repos (models/datasets) to load.
812
+
813
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
814
+
815
+ - If repo (model/dataset) is not found in the datastore:
816
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or the specified path) for local access
817
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
818
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
819
+
820
+ - If repo is found in the datastore:
821
+ - Loads it directly from the datastore to a local path (either a temporary directory or the specified path)
616
822
  """
617
823
  ...
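
Based on the docstring above, this is a sketch of how `@huggingface_hub(load=...)` and `current.huggingface_hub.loaded` might be used. The repo id and flow name are placeholders, and `loaded` is assumed here to behave as a mapping from repo id to a local path.

```python
from metaflow import FlowSpec, current, huggingface_hub, step


class HFLoadFlow(FlowSpec):

    # Download one Hugging Face repo, or restore it from the datastore cache.
    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # Per the docstring, loaded repos are exposed via current.huggingface_hub.loaded;
        # it is assumed here to map repo ids to local paths.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("repo available at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFLoadFlow()
```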
618
824
 
619
825
  @typing.overload
620
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
826
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
621
827
  """
622
- Specifies the PyPI packages for the step.
828
+ Specifies that the step will succeed under all circumstances.
623
829
 
624
- Information in this decorator will augment any
625
- attributes set in the `@pyi_base` flow-level decorator. Hence,
626
- you can use `@pypi_base` to set packages required by all
627
- steps and use `@pypi` to specify step-specific overrides.
830
+ The decorator will create an optional artifact, specified by `var`, which
831
+ contains the exception raised. You can use it to detect the presence
832
+ of errors, indicating that all happy-path artifacts produced by the step
833
+ are missing.
628
834
 
629
835
 
630
836
  Parameters
631
837
  ----------
632
- packages : Dict[str, str], default: {}
633
- Packages to use for this step. The key is the name of the package
634
- and the value is the version to use.
635
- python : str, optional, default: None
636
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
637
- that the version used will correspond to the version of the Python interpreter used to start the run.
838
+ var : str, optional, default None
839
+ Name of the artifact in which to store the caught exception.
840
+ If not specified, the exception is not stored.
841
+ print_exception : bool, default True
842
+ Determines whether or not the exception is printed to
843
+ stdout when caught.
638
844
  """
639
845
  ...
640
846
 
641
847
  @typing.overload
642
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
848
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
643
849
  ...
644
850
 
645
851
  @typing.overload
646
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
647
- ...
648
-
649
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
650
- """
651
- Specifies the PyPI packages for the step.
652
-
653
- Information in this decorator will augment any
654
- attributes set in the `@pyi_base` flow-level decorator. Hence,
655
- you can use `@pypi_base` to set packages required by all
656
- steps and use `@pypi` to specify step-specific overrides.
657
-
658
-
659
- Parameters
660
- ----------
661
- packages : Dict[str, str], default: {}
662
- Packages to use for this step. The key is the name of the package
663
- and the value is the version to use.
664
- python : str, optional, default: None
665
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
666
- that the version used will correspond to the version of the Python interpreter used to start the run.
667
- """
852
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
668
853
  ...
669
854
 
670
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
855
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
671
856
  """
672
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
673
-
674
- User code call
675
- --------------
676
- @ollama(
677
- models=[...],
678
- ...
679
- )
680
-
681
- Valid backend options
682
- ---------------------
683
- - 'local': Run as a separate process on the local task machine.
684
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
685
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
857
+ Specifies that the step will succeed under all circumstances.
686
858
 
687
- Valid model options
688
- -------------------
689
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
859
+ The decorator will create an optional artifact, specified by `var`, which
860
+ contains the exception raised. You can use it to detect the presence
861
+ of errors, indicating that all happy-path artifacts produced by the step
862
+ are missing.
690
863
 
691
864
 
692
865
  Parameters
693
866
  ----------
694
- models: list[str]
695
- List of Ollama containers running models in sidecars.
696
- backend: str
697
- Determines where and how to run the Ollama process.
698
- force_pull: bool
699
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
700
- cache_update_policy: str
701
- Cache update policy: "auto", "force", or "never".
702
- force_cache_update: bool
703
- Simple override for "force" cache update policy.
704
- debug: bool
705
- Whether to turn on verbose debugging logs.
706
- circuit_breaker_config: dict
707
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
708
- timeout_config: dict
709
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
867
+ var : str, optional, default None
868
+ Name of the artifact in which to store the caught exception.
869
+ If not specified, the exception is not stored.
870
+ print_exception : bool, default True
871
+ Determines whether or not the exception is printed to
872
+ stdout when caught.
710
873
  """
711
874
  ...
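
A minimal sketch of `@catch` as described above; the flow, step, and artifact names are placeholders, and inspecting the artifact with `getattr` is one possible pattern rather than a prescribed one.

```python
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):

    # If the division fails, the exception is stored in self.compute_failed
    # and the flow continues instead of crashing.
    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # deliberately raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failed", None):
            print("start step failed:", self.compute_failed)
        else:
            print("result:", self.result)


if __name__ == "__main__":
    CatchDemoFlow()
```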
712
875
 
713
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
876
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
714
877
  """
715
- Specifies that this step should execute on DGX cloud.
878
+ Specifies that this step is used to deploy an instance of the app.
879
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
716
880
 
717
881
 
718
882
  Parameters
719
883
  ----------
720
- gpu : int
884
+ app_port : int
721
885
  Port on which to serve the app.
722
- gpu_type : str
723
- Type of Nvidia GPU to use.
724
- queue_timeout : int
725
- Time to keep the job in NVCF's queue.
886
+ app_name : str
887
+ Name of the app to deploy.
726
888
  """
727
889
  ...
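
Given the sparse docstring above, the following is only a guess at how `@app_deploy` might be wired up; every attribute value (port, name, entrypoint, directory) is a hypothetical placeholder, and the deployment machinery itself is not part of the stubs.

```python
from metaflow import FlowSpec, app_deploy, step


class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        # Attributes required by @app_deploy per the docstring; values are placeholders.
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "serve.py"
        self.deployDir = "./app"
        self.next(self.deploy)

    # Assumed usage: decorate the step that performs the deployment.
    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def deploy(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    AppDeployFlow()
```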
728
890
 
729
891
  @typing.overload
730
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
892
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
893
  """
732
- Specifies secrets to be retrieved and injected as environment variables prior to
733
- the execution of a step.
894
+ Specifies the number of times the task corresponding
895
+ to a step needs to be retried.
896
+
897
+ This decorator is useful for handling transient errors, such as networking issues.
898
+ If your task contains operations that can't be retried safely, e.g. database updates,
899
+ it is advisable to annotate it with `@retry(times=0)`.
900
+
901
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
902
+ decorator will execute a no-op task after all retries have been exhausted,
903
+ ensuring that the flow execution can continue.
734
904
 
735
905
 
736
906
  Parameters
737
907
  ----------
738
- sources : List[Union[str, Dict[str, Any]]], default: []
739
- List of secret specs, defining how the secrets are to be retrieved
908
+ times : int, default 3
909
+ Number of times to retry this task.
910
+ minutes_between_retries : int, default 2
911
+ Number of minutes between retries.
740
912
  """
741
913
  ...
742
914
 
743
915
  @typing.overload
744
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
916
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
745
917
  ...
746
918
 
747
919
  @typing.overload
748
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
920
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
749
921
  ...
750
922
 
751
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
923
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
752
924
  """
753
- Specifies secrets to be retrieved and injected as environment variables prior to
754
- the execution of a step.
925
+ Specifies the number of times the task corresponding
926
+ to a step needs to be retried.
927
+
928
+ This decorator is useful for handling transient errors, such as networking issues.
929
+ If your task contains operations that can't be retried safely, e.g. database updates,
930
+ it is advisable to annotate it with `@retry(times=0)`.
931
+
932
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
933
+ decorator will execute a no-op task after all retries have been exhausted,
934
+ ensuring that the flow execution can continue.
755
935
 
756
936
 
757
937
  Parameters
758
938
  ----------
759
- sources : List[Union[str, Dict[str, Any]]], default: []
760
- List of secret specs, defining how the secrets are to be retrieved
939
+ times : int, default 3
940
+ Number of times to retry this task.
941
+ minutes_between_retries : int, default 2
942
+ Number of minutes between retries.
761
943
  """
762
944
  ...
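
A minimal sketch of `@retry` as documented above; the flow name is a placeholder and the random failure stands in for a genuinely flaky operation such as a network call.

```python
import random

from metaflow import FlowSpec, retry, step


class RetryDemoFlow(FlowSpec):

    # Retry transient failures up to 4 extra times, waiting 1 minute between attempts.
    @retry(times=4, minutes_between_retries=1)
    @step
    def start(self):
        if random.random() < 0.5:  # stand-in for a flaky network call
            raise ConnectionError("transient failure")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RetryDemoFlow()
```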
763
945
 
764
- @typing.overload
765
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
946
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
766
947
  """
767
- Creates a human-readable report, a Metaflow Card, after this step completes.
768
-
769
- Note that you may add multiple `@card` decorators in a step with different parameters.
948
+ Specifies that this step should execute on DGX cloud.
770
949
 
771
950
 
772
951
  Parameters
773
952
  ----------
774
- type : str, default 'default'
775
- Card type.
776
- id : str, optional, default None
777
- If multiple cards are present, use this id to identify this card.
778
- options : Dict[str, Any], default {}
779
- Options passed to the card. The contents depend on the card type.
780
- timeout : int, default 45
781
- Interrupt reporting if it takes more than this many seconds.
953
+ gpu : int
954
+ Number of GPUs to use.
955
+ gpu_type : str
956
+ Type of Nvidia GPU to use.
957
+ queue_timeout : int
958
+ Time to keep the job in NVCF's queue.
782
959
  """
783
960
  ...
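
A hedged sketch of `@nvidia` based only on the signature above; the GPU type string and the one-hour queue timeout are arbitrary placeholder values.

```python
from metaflow import FlowSpec, nvidia, step


class GpuTrainFlow(FlowSpec):

    # Illustrative values: 1 GPU of a hypothetical type, 1-hour queue timeout.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        print("training on DGX cloud")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GpuTrainFlow()
```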
784
961
 
785
962
  @typing.overload
786
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
963
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
964
+ """
965
+ Decorator prototype for all step decorators. This function gets specialized
966
+ and imported for all decorator types by _import_plugin_decorators().
967
+ """
787
968
  ...
788
969
 
789
970
  @typing.overload
790
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
791
- ...
792
-
793
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
794
- """
795
- Creates a human-readable report, a Metaflow Card, after this step completes.
796
-
797
- Note that you may add multiple `@card` decorators in a step with different parameters.
798
-
799
-
800
- Parameters
801
- ----------
802
- type : str, default 'default'
803
- Card type.
804
- id : str, optional, default None
805
- If multiple cards are present, use this id to identify this card.
806
- options : Dict[str, Any], default {}
807
- Options passed to the card. The contents depend on the card type.
808
- timeout : int, default 45
809
- Interrupt reporting if it takes more than this many seconds.
810
- """
971
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
811
972
  ...
812
973
 
813
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
974
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
814
975
  """
815
- Specifies that this step should execute on Kubernetes.
816
-
817
-
818
- Parameters
819
- ----------
820
- cpu : int, default 1
821
- Number of CPUs required for this step. If `@resources` is
822
- also present, the maximum value from all decorators is used.
823
- memory : int, default 4096
824
- Memory size (in MB) required for this step. If
825
- `@resources` is also present, the maximum value from all decorators is
826
- used.
827
- disk : int, default 10240
828
- Disk size (in MB) required for this step. If
829
- `@resources` is also present, the maximum value from all decorators is
830
- used.
831
- image : str, optional, default None
832
- Docker image to use when launching on Kubernetes. If not specified, and
833
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
834
- not, a default Docker image mapping to the current version of Python is used.
835
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
836
- If given, the imagePullPolicy to be applied to the Docker image of the step.
837
- image_pull_secrets: List[str], default []
838
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
839
- Kubernetes image pull secrets to use when pulling container images
840
- in Kubernetes.
841
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
842
- Kubernetes service account to use when launching pod in Kubernetes.
843
- secrets : List[str], optional, default None
844
- Kubernetes secrets to use when launching pod in Kubernetes. These
845
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
846
- in Metaflow configuration.
847
- node_selector: Union[Dict[str,str], str], optional, default None
848
- Kubernetes node selector(s) to apply to the pod running the task.
849
- Can be passed in as a comma separated string of values e.g.
850
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
851
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
852
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
853
- Kubernetes namespace to use when launching pod in Kubernetes.
854
- gpu : int, optional, default None
855
- Number of GPUs required for this step. A value of zero implies that
856
- the scheduled node should not have GPUs.
857
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
858
- The vendor of the GPUs to be used for this step.
859
- tolerations : List[str], default []
860
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
861
- Kubernetes tolerations to use when launching pod in Kubernetes.
862
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
863
- Kubernetes labels to use when launching pod in Kubernetes.
864
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
865
- Kubernetes annotations to use when launching pod in Kubernetes.
866
- use_tmpfs : bool, default False
867
- This enables an explicit tmpfs mount for this step.
868
- tmpfs_tempdir : bool, default True
869
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
870
- tmpfs_size : int, optional, default: None
871
- The value for the size (in MiB) of the tmpfs mount for this step.
872
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
873
- memory allocated for this step.
874
- tmpfs_path : str, optional, default /metaflow_temp
875
- Path to tmpfs mount for this step.
876
- persistent_volume_claims : Dict[str, str], optional, default None
877
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
878
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
879
- shared_memory: int, optional
880
- Shared memory size (in MiB) required for this step
881
- port: int, optional
882
- Port number to specify in the Kubernetes job object
883
- compute_pool : str, optional, default None
884
- Compute pool to be used for for this step.
885
- If not specified, any accessible compute pool within the perimeter is used.
886
- hostname_resolution_timeout: int, default 10 * 60
887
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
888
- Only applicable when @parallel is used.
889
- qos: str, default: Burstable
890
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
891
-
892
- security_context: Dict[str, Any], optional, default None
893
- Container security context. Applies to the task container. Allows the following keys:
894
- - privileged: bool, optional, default None
895
- - allow_privilege_escalation: bool, optional, default None
896
- - run_as_user: int, optional, default None
897
- - run_as_group: int, optional, default None
898
- - run_as_non_root: bool, optional, default None
976
+ Decorator prototype for all step decorators. This function gets specialized
977
+ and imported for all decorator types by _import_plugin_decorators().
899
978
  """
900
979
  ...
901
980
 
902
981
  @typing.overload
903
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
982
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
904
983
  """
905
- Specifies the resources needed when executing this step.
984
+ Specifies the flow(s) that this flow depends on.
906
985
 
907
- Use `@resources` to specify the resource requirements
908
- independently of the specific compute layer (`@batch`, `@kubernetes`).
986
+ ```
987
+ @trigger_on_finish(flow='FooFlow')
988
+ ```
989
+ or
990
+ ```
991
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
992
+ ```
993
+ This decorator respects the @project decorator and triggers the flow
994
+ when upstream runs within the same namespace complete successfully.
909
995
 
910
- You can choose the compute layer on the command line by executing e.g.
996
+ Additionally, you can specify project-aware upstream flow dependencies
997
+ by specifying the fully qualified project_flow_name.
911
998
  ```
912
- python myflow.py run --with batch
999
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
913
1000
  ```
914
1001
  or
915
1002
  ```
916
- python myflow.py run --with kubernetes
1003
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
917
1004
  ```
918
- which executes the flow on the desired system using the
919
- requirements specified in `@resources`.
1005
+
1006
+ You can also specify just the project or project branch (other values will be
1007
+ inferred from the current project or project branch):
1008
+ ```
1009
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1010
+ ```
1011
+
1012
+ Note that `branch` is typically one of:
1013
+ - `prod`
1014
+ - `user.bob`
1015
+ - `test.my_experiment`
1016
+ - `prod.staging`
920
1017
 
921
1018
 
922
1019
  Parameters
923
1020
  ----------
924
- cpu : int, default 1
925
- Number of CPUs required for this step.
926
- gpu : int, optional, default None
927
- Number of GPUs required for this step.
928
- disk : int, optional, default None
929
- Disk size (in MB) required for this step. Only applies on Kubernetes.
930
- memory : int, default 4096
931
- Memory size (in MB) required for this step.
932
- shared_memory : int, optional, default None
933
- The value for the size (in MiB) of the /dev/shm volume for this step.
934
- This parameter maps to the `--shm-size` option in Docker.
1021
+ flow : Union[str, Dict[str, str]], optional, default None
1022
+ Upstream flow dependency for this flow.
1023
+ flows : List[Union[str, Dict[str, str]]], default []
1024
+ Upstream flow dependencies for this flow.
1025
+ options : Dict[str, Any], default {}
1026
+ Backend-specific configuration for tuning eventing behavior.
935
1027
  """
936
1028
  ...
937
1029
 
938
1030
  @typing.overload
939
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
940
- ...
941
-
942
- @typing.overload
943
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1031
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
944
1032
  ...
945
1033
 
946
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1034
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
947
1035
  """
948
- Specifies the resources needed when executing this step.
1036
+ Specifies the flow(s) that this flow depends on.
949
1037
 
950
- Use `@resources` to specify the resource requirements
951
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1038
+ ```
1039
+ @trigger_on_finish(flow='FooFlow')
1040
+ ```
1041
+ or
1042
+ ```
1043
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1044
+ ```
1045
+ This decorator respects the @project decorator and triggers the flow
1046
+ when upstream runs within the same namespace complete successfully.
952
1047
 
953
- You can choose the compute layer on the command line by executing e.g.
1048
+ Additionally, you can specify project-aware upstream flow dependencies
1049
+ by specifying the fully qualified project_flow_name.
954
1050
  ```
955
- python myflow.py run --with batch
1051
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
956
1052
  ```
957
1053
  or
958
1054
  ```
959
- python myflow.py run --with kubernetes
1055
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
960
1056
  ```
961
- which executes the flow on the desired system using the
962
- requirements specified in `@resources`.
1057
+
1058
+ You can also specify just the project or project branch (other values will be
1059
+ inferred from the current project or project branch):
1060
+ ```
1061
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1062
+ ```
1063
+
1064
+ Note that `branch` is typically one of:
1065
+ - `prod`
1066
+ - `user.bob`
1067
+ - `test.my_experiment`
1068
+ - `prod.staging`
963
1069
 
964
1070
 
965
1071
  Parameters
966
1072
  ----------
967
- cpu : int, default 1
968
- Number of CPUs required for this step.
969
- gpu : int, optional, default None
970
- Number of GPUs required for this step.
971
- disk : int, optional, default None
972
- Disk size (in MB) required for this step. Only applies on Kubernetes.
973
- memory : int, default 4096
974
- Memory size (in MB) required for this step.
975
- shared_memory : int, optional, default None
976
- The value for the size (in MiB) of the /dev/shm volume for this step.
977
- This parameter maps to the `--shm-size` option in Docker.
1073
+ flow : Union[str, Dict[str, str]], optional, default None
1074
+ Upstream flow dependency for this flow.
1075
+ flows : List[Union[str, Dict[str, str]]], default []
1076
+ Upstream flow dependencies for this flow.
1077
+ options : Dict[str, Any], default {}
1078
+ Backend-specific configuration for tuning eventing behavior.
978
1079
  """
979
1080
  ...
980
1081
 
@@ -1017,99 +1118,156 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1017
1118
  the default is the same logical date as the current task or DAG. (Default: None)
1018
1119
  check_existence: bool
1019
1120
  Set to True to check if the external task exists or check if
1020
- the DAG to wait for exists. (Default: True)
1021
- """
1022
- ...
1023
-
1024
- @typing.overload
1025
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1026
- """
1027
- Specifies the PyPI packages for all steps of the flow.
1028
-
1029
- Use `@pypi_base` to set common packages required by all
1030
- steps and use `@pypi` to specify step-specific overrides.
1031
-
1032
- Parameters
1033
- ----------
1034
- packages : Dict[str, str], default: {}
1035
- Packages to use for this flow. The key is the name of the package
1036
- and the value is the version to use.
1037
- python : str, optional, default: None
1038
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1039
- that the version used will correspond to the version of the Python interpreter used to start the run.
1040
- """
1041
- ...
1042
-
1043
- @typing.overload
1044
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1121
+ the DAG to wait for exists. (Default: True)
1122
+ """
1045
1123
  ...
1046
1124
 
1047
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1125
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1048
1126
  """
1049
- Specifies the PyPI packages for all steps of the flow.
1127
+ Allows setting external datastores to save data for the
1128
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1050
1129
 
1051
- Use `@pypi_base` to set common packages required by all
1052
- steps and use `@pypi` to specify step-specific overrides.
1130
+ This decorator is useful when users wish to save data to a different datastore
1131
+ than what is configured in Metaflow. This can be for a variety of reasons:
1053
1132
 
1054
- Parameters
1133
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1134
+ 2. Data Locality: The task is executing in a different region from the datastore.
1135
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1136
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1137
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1138
+
1139
+ Usage:
1055
1140
  ----------
1056
- packages : Dict[str, str], default: {}
1057
- Packages to use for this flow. The key is the name of the package
1058
- and the value is the version to use.
1059
- python : str, optional, default: None
1060
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1061
- that the version used will correspond to the version of the Python interpreter used to start the run.
1062
- """
1063
- ...
1064
-
1065
- @typing.overload
1066
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1067
- """
1068
- Specifies the Conda environment for all steps of the flow.
1069
1141
 
1070
- Use `@conda_base` to set common libraries required by all
1071
- steps and use `@conda` to specify step-specific additions.
1142
+ - Using a custom IAM role to access the datastore.
1072
1143
 
1144
+ ```python
1145
+ @with_artifact_store(
1146
+ type="s3",
1147
+ config=lambda: {
1148
+ "root": "s3://my-bucket-foo/path/to/root",
1149
+ "role_arn": ROLE,
1150
+ },
1151
+ )
1152
+ class MyFlow(FlowSpec):
1073
1153
 
1074
- Parameters
1154
+ @checkpoint
1155
+ @step
1156
+ def start(self):
1157
+ with open("my_file.txt", "w") as f:
1158
+ f.write("Hello, World!")
1159
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1160
+ self.next(self.end)
1161
+
1162
+ ```
1163
+
1164
+ - Using credentials to access the s3-compatible datastore.
1165
+
1166
+ ```python
1167
+ @with_artifact_store(
1168
+ type="s3",
1169
+ config=lambda: {
1170
+ "root": "s3://my-bucket-foo/path/to/root",
1171
+ "client_params": {
1172
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1173
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1174
+ },
1175
+ },
1176
+ )
1177
+ class MyFlow(FlowSpec):
1178
+
1179
+ @checkpoint
1180
+ @step
1181
+ def start(self):
1182
+ with open("my_file.txt", "w") as f:
1183
+ f.write("Hello, World!")
1184
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1185
+ self.next(self.end)
1186
+
1187
+ ```
1188
+
1189
+ - Accessing objects stored in external datastores after task execution.
1190
+
1191
+ ```python
1192
+ run = Run("CheckpointsTestsFlow/8992")
1193
+ with artifact_store_from(run=run, config={
1194
+ "client_params": {
1195
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1196
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1197
+ },
1198
+ }):
1199
+ with Checkpoint() as cp:
1200
+ latest = cp.list(
1201
+ task=run["start"].task
1202
+ )[0]
1203
+ print(latest)
1204
+ cp.load(
1205
+ latest,
1206
+ "test-checkpoints"
1207
+ )
1208
+
1209
+ task = Task("TorchTuneFlow/8484/train/53673")
1210
+ with artifact_store_from(run=run, config={
1211
+ "client_params": {
1212
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1213
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1214
+ },
1215
+ }):
1216
+ load_model(
1217
+ task.data.model_ref,
1218
+ "test-models"
1219
+ )
1220
+ ```
1221
+ Parameters
1075
1222
  ----------
1076
- packages : Dict[str, str], default {}
1077
- Packages to use for this flow. The key is the name of the package
1078
- and the value is the version to use.
1079
- libraries : Dict[str, str], default {}
1080
- Supported for backward compatibility. When used with packages, packages will take precedence.
1081
- python : str, optional, default None
1082
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1083
- that the version used will correspond to the version of the Python interpreter used to start the run.
1084
- disabled : bool, default False
1085
- If set to True, disables Conda.
1223
+
1224
+ type: str
1225
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported Metaflow datastore.
1226
+
1227
+ config: dict or Callable
1228
+ Dictionary of configuration options for the datastore. The following keys are required:
1229
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1230
+ - example: 's3://bucket-name/path/to/root'
1231
+ - example: 'gs://bucket-name/path/to/root'
1232
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1233
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1234
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1235
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1086
1236
  """
1087
1237
  ...
1088
1238
 
1089
- @typing.overload
1090
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1091
- ...
1092
-
1093
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1239
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1094
1240
  """
1095
- Specifies the Conda environment for all steps of the flow.
1241
+ Specifies what flows belong to the same project.
1096
1242
 
1097
- Use `@conda_base` to set common libraries required by all
1098
- steps and use `@conda` to specify step-specific additions.
1243
+ A project-specific namespace is created for all flows that
1244
+ use the same `@project(name)`.
1099
1245
 
1100
1246
 
1101
1247
  Parameters
1102
1248
  ----------
1103
- packages : Dict[str, str], default {}
1104
- Packages to use for this flow. The key is the name of the package
1105
- and the value is the version to use.
1106
- libraries : Dict[str, str], default {}
1107
- Supported for backward compatibility. When used with packages, packages will take precedence.
1108
- python : str, optional, default None
1109
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1110
- that the version used will correspond to the version of the Python interpreter used to start the run.
1111
- disabled : bool, default False
1112
- If set to True, disables Conda.
1249
+ name : str
1250
+ Project name. Make sure that the name is unique amongst all
1251
+ projects that use the same production scheduler. The name may
1252
+ contain only lowercase alphanumeric characters and underscores.
1253
+
1254
+ branch : Optional[str], default None
1255
+ The branch to use. If not specified, the branch is set to
1256
+ `user.<username>` unless `production` is set to `True`. This can
1257
+ also be set on the command line using `--branch` as a top-level option.
1258
+ It is an error to specify `branch` in the decorator and on the command line.
1259
+
1260
+ production : bool, default False
1261
+ Whether or not the branch is the production branch. This can also be set on the
1262
+ command line using `--production` as a top-level option. It is an error to specify
1263
+ `production` in the decorator and on the command line.
1264
+ The project branch name will be:
1265
+ - if `branch` is specified:
1266
+ - if `production` is True: `prod.<branch>`
1267
+ - if `production` is False: `test.<branch>`
1268
+ - if `branch` is not specified:
1269
+ - if `production` is True: `prod`
1270
+ - if `production` is False: `user.<username>`
1113
1271
  """
1114
1272
  ...
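
To illustrate the branch-naming rules spelled out above, a minimal sketch; the project name, flow name, and file name are placeholders.

```python
from metaflow import FlowSpec, project, step


# All flows sharing @project(name="fraud_detection") live in one project namespace.
# `python scoring_flow.py run`                  -> branch user.<username>
# `python scoring_flow.py --branch exp1 run`    -> branch test.exp1
# `python scoring_flow.py --production run`     -> branch prod
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScoringFlow()
```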
1115
1273
 
@@ -1191,132 +1349,69 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1191
1349
  ```
1192
1350
  This is equivalent to:
1193
1351
  ```
1194
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1195
- ```
1196
-
1197
-
1198
- Parameters
1199
- ----------
1200
- event : Union[str, Dict[str, Any]], optional, default None
1201
- Event dependency for this flow.
1202
- events : List[Union[str, Dict[str, Any]]], default []
1203
- Events dependency for this flow.
1204
- options : Dict[str, Any], default {}
1205
- Backend-specific configuration for tuning eventing behavior.
1206
- """
1207
- ...
1208
-
1209
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1210
- """
1211
- Allows setting external datastores to save data for the
1212
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1213
-
1214
- This decorator is useful when users wish to save data to a different datastore
1215
- than what is configured in Metaflow. This can be for variety of reasons:
1216
-
1217
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1218
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1219
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1220
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1221
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1222
-
1223
- Usage:
1224
- ----------
1225
-
1226
- - Using a custom IAM role to access the datastore.
1227
-
1228
- ```python
1229
- @with_artifact_store(
1230
- type="s3",
1231
- config=lambda: {
1232
- "root": "s3://my-bucket-foo/path/to/root",
1233
- "role_arn": ROLE,
1234
- },
1235
- )
1236
- class MyFlow(FlowSpec):
1237
-
1238
- @checkpoint
1239
- @step
1240
- def start(self):
1241
- with open("my_file.txt", "w") as f:
1242
- f.write("Hello, World!")
1243
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1244
- self.next(self.end)
1245
-
1246
- ```
1247
-
1248
- - Using credentials to access the s3-compatible datastore.
1249
-
1250
- ```python
1251
- @with_artifact_store(
1252
- type="s3",
1253
- config=lambda: {
1254
- "root": "s3://my-bucket-foo/path/to/root",
1255
- "client_params": {
1256
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1257
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1258
- },
1259
- },
1260
- )
1261
- class MyFlow(FlowSpec):
1262
-
1263
- @checkpoint
1264
- @step
1265
- def start(self):
1266
- with open("my_file.txt", "w") as f:
1267
- f.write("Hello, World!")
1268
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1269
- self.next(self.end)
1352
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1353
+ ```
1270
1354
 
1271
- ```
1272
1355
 
1273
- - Accessing objects stored in external datastores after task execution.
1356
+ Parameters
1357
+ ----------
1358
+ event : Union[str, Dict[str, Any]], optional, default None
1359
+ Event dependency for this flow.
1360
+ events : List[Union[str, Dict[str, Any]]], default []
1361
+ Events dependency for this flow.
1362
+ options : Dict[str, Any], default {}
1363
+ Backend-specific configuration for tuning eventing behavior.
1364
+ """
1365
+ ...
1366
+
1367
+ @typing.overload
1368
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1369
+ """
1370
+ Specifies the Conda environment for all steps of the flow.
1274
1371
 
1275
- ```python
1276
- run = Run("CheckpointsTestsFlow/8992")
1277
- with artifact_store_from(run=run, config={
1278
- "client_params": {
1279
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1280
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1281
- },
1282
- }):
1283
- with Checkpoint() as cp:
1284
- latest = cp.list(
1285
- task=run["start"].task
1286
- )[0]
1287
- print(latest)
1288
- cp.load(
1289
- latest,
1290
- "test-checkpoints"
1291
- )
1372
+ Use `@conda_base` to set common libraries required by all
1373
+ steps and use `@conda` to specify step-specific additions.
1292
1374
 
1293
- task = Task("TorchTuneFlow/8484/train/53673")
1294
- with artifact_store_from(run=run, config={
1295
- "client_params": {
1296
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1297
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1298
- },
1299
- }):
1300
- load_model(
1301
- task.data.model_ref,
1302
- "test-models"
1303
- )
1304
- ```
1305
- Parameters:
1375
+
1376
+ Parameters
1306
1377
  ----------
1378
+ packages : Dict[str, str], default {}
1379
+ Packages to use for this flow. The key is the name of the package
1380
+ and the value is the version to use.
1381
+ libraries : Dict[str, str], default {}
1382
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1383
+ python : str, optional, default None
1384
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1385
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1386
+ disabled : bool, default False
1387
+ If set to True, disables Conda.
1388
+ """
1389
+ ...
1390
+
1391
+ @typing.overload
1392
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1393
+ ...
1394
+
1395
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1396
+ """
1397
+ Specifies the Conda environment for all steps of the flow.
1307
1398
 
1308
- type: str
1309
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1399
+ Use `@conda_base` to set common libraries required by all
1400
+ steps and use `@conda` to specify step-specific additions.
1310
1401
 
1311
- config: dict or Callable
1312
- Dictionary of configuration options for the datastore. The following keys are required:
1313
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1314
- - example: 's3://bucket-name/path/to/root'
1315
- - example: 'gs://bucket-name/path/to/root'
1316
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1317
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1318
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1319
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1402
+
1403
+ Parameters
1404
+ ----------
1405
+ packages : Dict[str, str], default {}
1406
+ Packages to use for this flow. The key is the name of the package
1407
+ and the value is the version to use.
1408
+ libraries : Dict[str, str], default {}
1409
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1410
+ python : str, optional, default None
1411
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1412
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1413
+ disabled : bool, default False
1414
+ If set to True, disables Conda.
1320
1415
  """
1321
1416
  ...
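
A sketch that combines `@conda_base` with a step-level `@conda` addition, as the docstring above suggests; the flow name, packages, and version pins are placeholders.

```python
from metaflow import FlowSpec, conda, conda_base, step


# Flow-wide Conda environment shared by every step; pins are illustrative.
@conda_base(python="3.11.9", packages={"numpy": "1.26.4"})
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # provided by the flow-level environment
        print(np.__version__)
        self.next(self.train)

    # Step-specific addition on top of the flow-level packages.
    @conda(packages={"scikit-learn": "1.5.0"})
    @step
    def train(self):
        import sklearn
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseFlow()
```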
1322
1417
 
@@ -1363,38 +1458,44 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
 
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
 
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
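As a quick illustration of the `@pypi_base` / `@pypi` split described in the docstring added above, here is a minimal sketch; the flow name, package pins, and the step-level `@pypi` override are assumptions made for this example, not content of the diff:

```
from metaflow import FlowSpec, pypi, pypi_base, step

# Hypothetical flow: packages shared by every step via @pypi_base,
# plus a step-specific addition via @pypi.
@pypi_base(packages={"requests": "2.32.3"}, python="3.11.6")
class FetchFlow(FlowSpec):

    @step
    def start(self):
        import requests  # installed for every step by @pypi_base
        self.status = requests.get("https://example.com").status_code
        self.next(self.parse)

    @pypi(packages={"beautifulsoup4": "4.12.3"})  # extend/override for this step only
    @step
    def parse(self):
        import bs4  # only this step needs beautifulsoup4
        self.parser = bs4.__name__
        self.next(self.end)

    @step
    def end(self):
        print(self.status, self.parser)


if __name__ == "__main__":
    FetchFlow()
```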
@@ -1449,106 +1550,5 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
 
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
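For context on the event-triggering decorator whose stub is removed in this hunk, a minimal sketch of how `@trigger_on_finish` is applied; the flow names reuse the illustrative `FooFlow`/`BarFlow` from the docstring, and everything else is an assumption for this example:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Hypothetical downstream flow: starts automatically after FooFlow
# completes successfully within the same (project) namespace.
@trigger_on_finish(flow="FooFlow")
class BarFlow(FlowSpec):

    @step
    def start(self):
        print("Upstream FooFlow finished; running downstream work.")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```

On a local run the decorator is effectively a no-op; the triggering behavior typically takes effect only once the flow is deployed to a production orchestrator.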
  pkg_name: str